Skip to content
This repository was archived by the owner on Jul 21, 2022. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions conductor/lib/file_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -487,3 +487,32 @@ def strip_drive_letter(filepath):
'''
rx_drive = r'^[a-z]:'
return re.sub(rx_drive, "", filepath, flags=re.I)

def expand_paths(paths):
    '''
    Expand a list of glob-style patterns into a flat list of normalized,
    matching filepaths.

    paths: An iterable of strings. Each entry is a glob pattern; surrounding
           whitespace (e.g. trailing newlines read from a file) is stripped
           before matching.

    Return a list of strings. A pattern that matches nothing contributes no
    entries (it is silently dropped).
    '''

    expanded_paths = []

    for path in paths:
        # normpath so results compare cleanly against other normalized paths
        # (on Windows it's easy to end up with mixed forward/back slashes)
        expanded = [os.path.normpath(p) for p in glob.glob(path.strip())]
        # Use lazy %-style logging args (matches this file's convention)
        # rather than eagerly formatting the message with str.format
        logger.debug("'%s' expanded to %s", path, expanded)
        expanded_paths.extend(expanded)

    return expanded_paths

def expand_paths_from_file(path):
    '''
    Read glob-style patterns from a file and return a list of all matching
    filepaths. The file must contain one glob-style pattern per-line.

    path: A string. The path to the file that contains glob patterns.
    '''

    with open(path) as pattern_file:
        lines = pattern_file.readlines()

    return expand_paths(lines)

137 changes: 84 additions & 53 deletions conductor/lib/maya_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -472,21 +472,33 @@ def get_render_layers_info():
return render_layers


def collect_dependencies(node_attrs):
def collect_dependencies(node_attrs, leaf_paths=None, exclude_paths=None):
'''
Return a list of filepaths that the current maya scene has dependencies on.
This is achieved by inspecting maya's nodes. Use the node_attrs argument
to pass in a dictionary
'''

leaf_paths: A list of strings. A list of paths to skip for nested dependencies.
Ex: A maya file that itself will be included but not textures, or
other nested references will be.
exclude_paths: A list of strings. A list of paths to exclude from the
dependency scanner.
'''

assert isinstance(node_attrs, dict), "node_attrs arg must be a dict. Got %s" % type(node_attrs)

leaf_paths = leaf_paths or []
exclude_paths = exclude_paths or []

# TODO: Temporary hack to work around renderman 23.3 bug.
# Record the active renderer so that we can restore it after making this cmds.file call
active_renderer = get_active_renderer()
# Note that this command will often times return filepaths with an ending "/" on it for some reason. Strip this out at the end of the function
dependencies = cmds.file(query=True, list=True, withoutCopyNumber=True) or []

# Strip errant dependences (another part of the renderman bug above).
dependencies = [path for path in dependencies if not path.endswith('_<user')]
# normpath is used as it's possible (in Windows) to have a path with mixed back/forward slashes
dependencies = [os.path.normpath(path) for path in dependencies if not path.endswith('_<user')]
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why the normpath?

Copy link
Copy Markdown
Contributor Author

@jesseconductor jesseconductor Sep 30, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In my tests I found that I could easily end-up with a path that had mixed forward and back slashes (on Windows). I added that as a comment.

logger.debug("maya scene base dependencies: %s", dependencies)
# Reinstate active renderer
cmds.setAttr("defaultRenderGlobals.currentRenderer", active_renderer, type="string")
Expand Down Expand Up @@ -516,42 +528,57 @@ def collect_dependencies(node_attrs):
# directory (i.e. it doesn't have any real smarts about path resolution, etc).
# NOTE: that this command will oftentimes return filepaths with an ending "/" on
# it for some reason. Strip this out at the end of the function
path = cmds.file(plug_value, expandName=True, query=True, withoutCopyNumber=True)
#
# normpath() is used as it's possible (in Windows) to have a path with mixed back/forward slashes
path = os.path.normpath(cmds.file(plug_value, expandName=True, query=True, withoutCopyNumber=True))
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why the normpath?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same answer as above...

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In general, I've tried to narrow the places in our client code for where/when paths are manipulated/conditioned. The majority of the time it occurs after dependency scraping (rather than before, or during). Specifically the process_upload_filepath function is called to explode/resolve/normalize any paths that come out of the dependency scraping phase. See here.
There certainly are exceptions to this (process_upload_filepath gets called several times within the collect_dependencies function), but they occur within isolated logic leaves (not affecting other logic leaves).
I'm not saying that this will break anything. But I wouldn't be surprised if it did.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's needed here so that it can be compared to values in exclude_paths and leaf_paths

logger.debug("%s: %s", plug_name, path)


if path in exclude_paths:
logger.info("Skipping depedency '{}' - in exclusion list".format(path))
continue

if path in leaf_paths:
logger.info("Skipping nested scanning of '{}' - in leaf list".format(path))
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The log entry message doesn't match the code.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If you continued'd here, you'd be able to remove the duplicated if path not in leaf_path_list: in this function.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No.. because the path itself still needs to be added to the list of dependencies on line 640. I struggled on how to do this cleanly... It really needs a class-based approach. I know @hoolymama did a big refactor on the dependency scanner. Perhaps we can merge that in to take advantage of the refactor?


# ---- XGEN SCRAPING -----
# For xgen files, read the .xgen file and parse out the directory where other dependencies may exist
if node_type == "xgmPalette":
maya_filepath = cmds.file(query=True, sceneName=True)
palette_filepath = os.path.join(os.path.dirname(maya_filepath), plug_value)
xgen_dependencies = scrape_palette_node(node, palette_filepath) + [palette_filepath]
logger.debug("xgen_dependencies: %s", xgen_dependencies)
dependencies += xgen_dependencies

if path not in leaf_paths:
maya_filepath = cmds.file(query=True, sceneName=True)
palette_filepath = os.path.join(os.path.dirname(maya_filepath), plug_value)
xgen_dependencies = scrape_palette_node(node, palette_filepath) + [palette_filepath]
logger.debug("xgen_dependencies: %s", xgen_dependencies)
dependencies += xgen_dependencies

# continue here so that we don't append the path to dependencies later on.
# (the path that would have been appended is actually not the correct path).
continue

# ---- VRAY SCRAPING -----
if node_type == "VRayScene":
vrscene_dependencies = parse_vrscene_file(path)
logger.debug("vrscene dependencies: %s" % vrscene_dependencies)
dependencies += vrscene_dependencies
if path not in leaf_paths:
vrscene_dependencies = parse_vrscene_file(path)
logger.debug("vrscene dependencies: %s" % vrscene_dependencies)
dependencies += vrscene_dependencies

# ---- YETI SCRAPING -----
if node_type == "pgYetiMaya":
yeti_dependencies = scrape_yeti_graph(node)
logger.debug("yeti dependencies: %s" % yeti_dependencies)
dependencies += yeti_dependencies

# Check whether the node is reading from disk or not.
# If it's not, then we shouldn't include the path as
# a dependency
if not cmds.getAttr('%s.fileMode' % node):
logger.debug("Skipping path because fileMode is disabled")
continue
if path not in leaf_paths:
yeti_dependencies = scrape_yeti_graph(node)
logger.debug("yeti dependencies: %s" % yeti_dependencies)
dependencies += yeti_dependencies

# Check whether the node is reading from disk or not.
# If it's not, then we shouldn't include the path as
# a dependency
if not cmds.getAttr('%s.fileMode' % node):
logger.debug("Skipping path because fileMode is disabled")
continue

# ---- ARNOLD STANDIN SCRAPING -----
if node_type == "aiStandIn":

# We expect an aiStandin node to point towards and .ass file (or sequence thereof)
# Instead of loading/reading the .ass file now, simply append to a list
# that we'll process all at one time (*much faster*)
Expand All @@ -561,8 +588,9 @@ def collect_dependencies(node_attrs):
# file in an .ass sequence will have the same file dependencies, so don't bother reading every
# ass file. Perhaps dangerous, but we'll cross that bridge later (it's better than reading/loading
# potentially thousands of .ass files)
ass_filepath = file_utils.process_upload_filepath(path, strict=True)[0]
ass_filepaths.append(ass_filepath)
if path not in leaf_paths:
ass_filepath = file_utils.process_upload_filepath(path, strict=True)[0]
ass_filepaths.append(ass_filepath)

# ---- RENDERMAN RLF files -----
# If the node type is a RenderManArchive, then it may have an associated .rlf
Expand All @@ -573,23 +601,24 @@ def collect_dependencies(node_attrs):
# will have its corresponding .rlf file here:
# renderman/ribarchives/SpidermanRibArchiveShape/SpidermanRibArchiveShape.job.rlf
if node_type == "RenderManArchive" and node_attr == "filename":
archive_dependencies = []
rlf_dirpath = os.path.splitext(path)[0]
rlf_filename = "%s.job.rlf" % os.path.basename(rlf_dirpath)
rlf_filepath = os.path.join(rlf_dirpath, rlf_filename)
logger.debug("Searching for corresponding rlf file: %s", rlf_filepath)
rlf_filepaths = file_utils.process_upload_filepath(rlf_filepath, strict=False)
if rlf_filepaths:
rlf_filepath = rlf_filepaths[0] # there should only be one
# Parse the rlf file for file dependencies.
# Note that though this is an rlf file, there is embedded rib data within
# that we can parse using this rib parser.
logger.debug("Parsing rlf file: %s", rlf_filepath)
rlf_depedencies = parse_rib_file(rlf_filepath)
archive_dependencies.extend([rlf_filepath] + rlf_depedencies)

logger.debug('%s dependencies: %s', plug_name, archive_dependencies)
dependencies.extend(archive_dependencies)
if path not in leaf_paths:
archive_dependencies = []
rlf_dirpath = os.path.splitext(path)[0]
rlf_filename = "%s.job.rlf" % os.path.basename(rlf_dirpath)
rlf_filepath = os.path.join(rlf_dirpath, rlf_filename)
logger.debug("Searching for corresponding rlf file: %s", rlf_filepath)
rlf_filepaths = file_utils.process_upload_filepath(rlf_filepath, strict=False)
if rlf_filepaths:
rlf_filepath = rlf_filepaths[0] # there should only be one
# Parse the rlf file for file dependencies.
# Note that though this is an rlf file, there is embedded rib data within
# that we can parse using this rib parser.
logger.debug("Parsing rlf file: %s", rlf_filepath)
rlf_depedencies = parse_rib_file(rlf_filepath)
archive_dependencies.extend([rlf_filepath] + rlf_depedencies)

logger.debug('%s dependencies: %s', plug_name, archive_dependencies)
dependencies.extend(archive_dependencies)

# ---- REDSHIFT SCRAPING -----
# The redshiftOptions node populates some cache filepaths by default. However,
Expand All @@ -601,14 +630,15 @@ def collect_dependencies(node_attrs):
# to exist on disk). This is not a perfect assumption, but we can adjust as
# needed...perhaps by querying the caching mode.
if node_type == "RedshiftOptions" and file_utils.RX_FRAME_REDSHIFT in path:
logger.debug("Resolving path expression: %s", path)
redshift_filepaths = file_utils.process_upload_filepath(path, strict=False)
if redshift_filepaths:
logger.debug("Resolved filepaths: %s", redshift_filepaths)
dependencies.extend(redshift_filepaths)
# continue here so that we don't append the original (unresolved) path as
# file dependency (later on).
continue
if path not in leaf_paths:
logger.debug("Resolving path expression: %s", path)
redshift_filepaths = file_utils.process_upload_filepath(path, strict=False)
if redshift_filepaths:
logger.debug("Resolved filepaths: %s", redshift_filepaths)
dependencies.extend(redshift_filepaths)
# continue here so that we don't append the original (unresolved) path as
# file dependency (later on).
continue

# Append path to list of dependencies
dependencies.append(path)
Expand All @@ -632,7 +662,7 @@ def collect_dependencies(node_attrs):
dependencies.extend(ass_dependencies)

# Strip out any paths that end in "\" or "/" Hopefully this doesn't break anything.
return sorted(set([path.rstrip("/\\") for path in dependencies]))
return sorted(set([os.path.normpath(path.rstrip("/\\")) for path in dependencies]))


def scrape_yeti_graph(yeti_node):
Expand Down Expand Up @@ -870,8 +900,9 @@ def parse_ocio_config_paths(config_filepath):
for path in search_paths:
# If the path is relative, resolve it
if not os.path.isabs(path):
path = os.path.join(config_dirpath, path)
logging.debug("Resolved relative path '%s' to '%s'", )
relative_path = os.path.join(config_dirpath, path)
logging.debug("Resolved relative path '%s' to '%s'", path, relative_path)
path = relative_path

if not os.path.isdir(path):
logger.warning("OCIO search path does not exist: %s", path)
Expand Down
7 changes: 6 additions & 1 deletion conductor/resources/resources.yml
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,12 @@ arnold_dependency_attrs:
MayaFile:
- filename
xgen_procedural:
- data
- data
procedural:
- filename
image:
- filename

# xgen palette file dependency node attrs
xgen_dependency_attrs:
Palette:
Expand Down
19 changes: 18 additions & 1 deletion conductor/submitter_maya.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,10 @@ class MayaConductorSubmitter(submitter.ConductorSubmitter):
product = "maya-io"

def __init__(self, parent=None):

self.dependency_leaf_glob_file_path = os.environ.get('CONDUCTOR_DEPSCAN_LEAF_FILE')
self.dependency_exclude_glob_file_path = os.environ.get('CONDUCTOR_DEPSCAN_EXCLUDE_FILE')

super(MayaConductorSubmitter, self).__init__(parent=parent)
self.setMayaWindow()

Expand Down Expand Up @@ -381,7 +385,20 @@ def collectDependencies(self):
resources = common.load_resources_file()
dependency_attrs = resources.get("maya_dependency_attrs") or {}

return maya_utils.collect_dependencies(dependency_attrs)
leaf_path_list = []
exclude_path_list = []

if self.dependency_leaf_glob_file_path is not None:
leaf_path_list = file_utils.expand_paths_from_file(self.dependency_leaf_glob_file_path)

if self.dependency_exclude_glob_file_path is not None:
exclude_path_list = file_utils.expand_paths_from_file(self.dependency_exclude_glob_file_path)

logger.debug("Using expanded leaf file list: {}".format(leaf_path_list))

return maya_utils.collect_dependencies(dependency_attrs,
leaf_path_list=leaf_path_list,
exclude_path_list=exclude_path_list)

def getEnvironment(self):
'''
Expand Down