Conceptually, when interscript processes multiple master documents, each document is elaborated in a separate thread. The master frame holds thread-local data, whereas the process frame is shared by all threads.
At the 'document' level, a failure to generate a document correctly is at worst fatal to that thread of control, because documents are largely independent of one another. However, when multiple documents form a project, inter-document dependencies may require aborting the whole process if one of the master threads fails.
#line 23 "master_frame.ipk"
from interscript.frames.passf import pass_frame
from interscript.drivers.sources.disk import parse_source_filename
from interscript.languages.interscript_languages import add_translation
import pickle
import types

class master_frame:
    """Per-document elaboration state (one instance per master thread).

    A master_frame is created for each master document.  Using the
    pickled cache from the previous run and the timestamps of the
    source files, it decides which include files must be rebuilt and
    then drives the passes via run_passes().
    """

    def _cal_deps(self, t, i):
        """Propagate 'changed' flags up the include tree.

        t is a flat, depth-first list of entries
            [level, kind, filename, filetime, changed, tobuild]
        where 'level' is the include-nesting depth.  Entry i must be
        rebuilt if it changed itself or if any of its descendants
        (the following entries with a greater level) must be rebuilt.

        Writes the computed flag into t[i][5] and returns
        (tobuild, j), where j is the index of the first entry that is
        not a descendant of entry i.
        """
        n = len(t)
        level0, kind0, filename0, filetime0, changed0, tobuild0 = t[i]
        tobuild0 = changed0
        j = i + 1
        while j < n:
            # A sibling or shallower entry ends this subtree.
            if t[j][0] <= level0:
                break
            tobuild1, j = self._cal_deps(t, j)
            tobuild0 = tobuild0 or tobuild1
        t[i][5] = tobuild0
        return tobuild0, j

    def __init__(self, process, argument_frame):
        """Initialise per-document state and run the passes.

        process        -- the shared process frame (site, platform, trace).
        argument_frame -- per-document options; every attribute is copied
                          onto this frame (so e.g. self.usecache,
                          self.filename, self.autoweave come from it).
        """
        self.argument_frame = argument_frame
        self.process = process
        self.site = process.site
        self.platform = process.site.platform

        if 'frames' in process.trace:
            self.process.acquire_object(self, 'MASTER FRAME')
        # Copy every option from the argument frame onto this frame.
        for k in argument_frame.__dict__.keys():
            if 'options' in self.process.trace:
                print('setting MASTER', k, 'as', argument_frame.__dict__[k])
            setattr(self, k, argument_frame.__dict__[k])

        # Per-document accumulators filled in by the passes.
        self.ids = {}
        self.iflist = []
        self.ftp_list = []
        self.flist = []
        self.fdict = {}
        self.toc = []
        self.include_files = []
        self.classes = {}
        self.functions = {}
        self.sequence_limit = -1
        self.section_index = {}
        self.tests = {}
        self.noticedict = {}
        self.persistent_frames = {}
        self.cache_age = 1  # very old :-)
        os = self.platform.os

        # Try to load the pickled results of the previous run.  Any
        # failure (missing file, corrupt pickle, ...) just means a full
        # rebuild with the defaults set above.
        if self.usecache:
            self.cache_name = self.platform.map_filename(
                self.source_prefix, self.filename + '.cache')
            try:
                if 'cache' in self.process.trace:
                    print('CACHE NAME=', self.cache_name)
                # Pickle data is binary: open in 'rb' (was 'r', which
                # breaks under Python 3 and on Windows).
                cache = self.platform.open(self.cache_name, 'rb')
                if 'cache' in self.process.trace:
                    print('CACHE FILE OPENED')
                self.persistent_frames = pickle.load(cache)
                cache.close()
                if 'cache' in self.process.trace:
                    print('GOT CACHE')
                self.cache_age = self.platform.getmtime(self.cache_name)
                if 'cache' in self.process.trace:
                    print('AGE=', self.cache_age)
                    self.dump_cache()
                del cache
            except KeyboardInterrupt:
                raise
            except Exception:
                # Deliberate best-effort: a missing or stale cache is
                # not an error (narrowed from a bare 'except:' so that
                # SystemExit still propagates).
                if 'cache' in self.process.trace:
                    print('UNABLE TO LOAD CACHE')

        # Results of the previous run (empty on a cold start).
        include_files = self.persistent_frames.get('include files', [])
        old_options = self.persistent_frames.get('options', {})

        # Build the dependency tree: one entry per source file, marked
        # 'changed' when the file is newer than the cache.
        self.src_tree = []
        for level, kind, filename in include_files:
            f = self.platform.map_filename(self.source_prefix, filename)
            filetime = self.platform.getmtime(f)
            changed = filetime > self.cache_age
            self.src_tree.append([level, kind, filename, filetime, changed, -1])

        if 'deps' in self.process.trace:
            print('SOURCE FILE CHANGE STATUS DEPENDENCIES')
            print('CACHE TIME=', self.cache_age)
            self.dump_entry(self.src_tree, 0)
            print()

        # Propagate change flags through the tree; with no recorded
        # tree at all (cold start) everything must be built.
        src_tree_changed = 1
        if len(self.src_tree) > 0:
            src_tree_changed, _ = self._cal_deps(self.src_tree, 0)

        if 'deps' in self.process.trace:
            print('COMPUTED DEPENDENCIES')
            print('CACHE TIME=', self.cache_age, end=' ')
            self.dump_entry(self.src_tree, 0)
            print()

        # Progress report for the user (unconditional, like the
        # PROCESSING/SKIPPING messages below).
        if src_tree_changed:
            print('WORK TO BE DONE')
            for level, kind, filename, filetime, changed, tobuild in self.src_tree:
                if changed:
                    print(kind, 'file', filename, 'CHANGED')
            for level, kind, filename, filetime, changed, tobuild in self.src_tree:
                if tobuild:
                    print(kind, 'file', filename, 'WILL BE REBUILT')
            for level, kind, filename, filetime, changed, tobuild in self.src_tree:
                if not tobuild:
                    print(kind, 'file', filename, 'WILL BE SKIPPED')
        else:
            print('NO FILES CHANGED')
        if self.autoweave:
            print('WEAVING ENABLED')
        else:
            print('NO WEAVING')

        # Candidate skip list: files whose subtree is unchanged.
        # (Hoisted out of the 'if' so skiplist is always bound.)
        skiplist = []
        if src_tree_changed:
            for level, kind, filename, filetime, changed, tobuild in self.src_tree:
                if not tobuild:
                    skiplist.append((level, kind, filename))
        current_options = self.argument_frame.__dict__

        # Compare this run's options with the cached ones, ignoring
        # settings that do not affect the generated output.  (Dict
        # comprehensions instead of deleting keys while iterating
        # .keys(), which is illegal in Python 3.)
        ign_opt = ['trace', 'passes']
        cur_opt = {k: v for k, v in current_options.items() if k not in ign_opt}
        old_opt = {k: v for k, v in old_options.items() if k not in ign_opt}

        options_changed = old_opt != cur_opt
        del ign_opt, cur_opt, old_opt

        old_converged = self.persistent_frames.get('converged', 0)
        if 'deps' in self.process.trace:
            if old_converged:
                print('PREVIOUS RUN CONVERGED')
            else:
                print('PREVIOUS RUN DID NOT CONVERGE')
            if options_changed:
                print('OPTIONS CHANGED')
            else:
                print('SAME OPTIONS AS BEFORE')

        # Nothing changed and the previous run converged: every output
        # is already up to date, so skip the passes entirely.
        if (not options_changed) and old_converged and (not src_tree_changed):
            print('NO WORK TO DO, returning')
            return

        # Decide how much can be skipped this run.
        old_skiplist = self.persistent_frames.get('skiplist', [])
        if options_changed:
            print('PROCESSING WHOLE FILE (options changed)')
            skiplist = []
        elif self.autoweave:
            print('PROCESSING WHOLE FILE (incremental weaving not supported yet)')
            skiplist = []
        elif old_converged:
            print('SKIPPING (newly changed files)')
            for sig in skiplist:
                print(' ', sig)
        elif src_tree_changed:
            print('PROCESSING WHOLE FILE (source changed)')
            skiplist = []
        else:
            # Last run did not converge and nothing changed since:
            # redo exactly what was not skipped last time.
            print('SKIPPING (files skipped last run, which did not converge)')
            skiplist = old_skiplist
        self.run_passes(skiplist)
        self.persistent_frames['skiplist'] = skiplist
        return