Skip to content

Commit cfd468b

Browse files
committed
Merge branch 'st3'
2 parents d6955fd + 8cece69 commit cfd468b

File tree

3 files changed

+64
-58
lines changed

3 files changed

+64
-58
lines changed

latextools/latextools_cache_listener.py

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -47,36 +47,40 @@ def update_cache(cache, doc, bib):
4747
def worker():
4848
with ActivityIndicator("Updating LaTeXTools cache") as activity:
4949
try:
50-
cache.invalidate("bib_files")
50+
cache.invalidate()
5151
if doc:
5252
logger.debug("Updating analysis cache for %s", cache.tex_root)
5353
cache.set("analysis", analysis.analyze_document(cache.tex_root))
5454
if bib:
5555
logger.debug("Updating bibliography cache for %s", cache.tex_root)
5656
run_plugin_command("get_entries", *(find_bib_files(cache.tex_root) or []))
5757
except Exception:
58+
activity.finish("LaTeXTools cache update failed")
5859
traceback.print_exc()
5960
else:
6061
activity.finish("LaTeXTools cache updated")
6162

62-
if cache and (doc or bib):
63+
if cache:
6364
threading.Thread(target=worker).start()
6465

6566

6667
class LatextoolsCacheUpdateListener(sublime_plugin.EventListener):
6768
def on_load(self, view):
68-
if not view.match_selector(0, "text.tex.latex"):
69+
if not view.is_primary():
6970
return
7071

71-
update_doc = get_setting("cache.analysis.update_on_load", True, view)
72-
update_bib = get_setting("cache.bibliography.update_on_load", True, view)
73-
if not update_doc and not update_bib:
72+
if not view.match_selector(0, "text.tex.latex"):
7473
return
7574

7675
cache = get_cache(view)
7776
if not cache:
7877
return
7978

79+
update_doc = get_setting("cache.analysis.update_on_load", True, view)
80+
update_bib = get_setting("cache.bibliography.update_on_load", True, view)
81+
if not update_doc and not update_bib:
82+
return
83+
8084
# because cache state is shared amongst all documents sharing a tex
8185
# root, this ensures we only load the analysis ONCE in the on_load
8286
# event
@@ -90,10 +94,10 @@ def on_close(self, view):
9094
remove_cache(view)
9195

9296
def on_post_save(self, view):
93-
if not view.match_selector(0, "text.tex.latex"):
97+
if not view.is_primary():
9498
return
9599

96-
if not view.is_primary():
100+
if not view.match_selector(0, "text.tex.latex"):
97101
return
98102

99103
update_doc = get_setting("cache.analysis.update_on_save", True, view)

latextools/utils/analysis.py

Lines changed: 20 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -425,17 +425,14 @@ def _analyze_tex_file(
425425
logger.error("File appears cyclic: %s\n%s", file_name, process_file_stack)
426426
return ana
427427

428-
if not import_path:
429-
base_path, _ = os.path.split(tex_root)
430-
else:
431-
base_path = import_path
428+
base_path = import_path if import_path else os.path.dirname(tex_root)
432429

433430
# store import path at the base path, such that it can be accessed
434431
if import_path:
435432
if file_name in ana._import_base_paths:
436433
if ana._import_base_paths[file_name] != import_path:
437434
logger.warning(
438-
"'%s' is imported twice. " "Cannot handle this correctly in the analysis.",
435+
"'%s' is imported twice. Cannot handle this correctly in the analysis.",
439436
file_name,
440437
)
441438
else:
@@ -473,6 +470,7 @@ def _analyze_tex_file(
473470
# check that we still need to analyze
474471
if only_preamble and ana._state.get("preamble_finished", False):
475472
return ana
473+
476474
elif g("command") in _import_commands and g("args") is not None and g("args2") is not None:
477475
if g("command").startswith("sub"):
478476
next_import_path = os.path.join(base_path, g("args").strip('"'))
@@ -495,6 +493,7 @@ def _analyze_tex_file(
495493
# check that we still need to analyze
496494
if only_preamble and ana._state.get("preamble_finished", False):
497495
return ana
496+
498497
# subfile support:
499498
# if we are not in the root file (i.e. not call from included files)
500499
# and have the command \documentclass[main.tex]{subfiles}
@@ -519,6 +518,22 @@ def _analyze_tex_file(
519518
except KeyError:
520519
pass
521520

521+
# usepackage(local) support:
522+
# analyze existing local packages or stylesheets
523+
elif g("command") == "usepackage" and g("args") is not None:
524+
fn = os.path.join(base_path, os.path.splitext(g("args").strip('"'))[0])
525+
for ext in (".sty", ".tex"):
526+
open_file = fn + ext
527+
if os.path.isfile(open_file):
528+
process_file_stack.append(file_name)
529+
_analyze_tex_file(tex_root, open_file, process_file_stack, ana)
530+
process_file_stack.pop()
531+
break
532+
533+
# check that we still need to analyze
534+
if only_preamble and ana._state.get("preamble_finished", False):
535+
return ana
536+
522537
return ana
523538

524539

latextools/utils/cache.py

Lines changed: 32 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -50,9 +50,7 @@ def hash_digest(text):
5050
Arguments:
5151
text -- the text for which the digest should be created
5252
"""
53-
text_encoded = text.encode("utf8")
54-
hash_result = hashlib.md5(text_encoded)
55-
return hash_result.hexdigest()
53+
return hashlib.md5(text.encode("utf-8")).hexdigest()
5654

5755

5856
def cache_local(tex_root, key, func):
@@ -173,27 +171,28 @@ def _global_cache_path():
173171

174172
# marker class for invalidated result
175173
class InvalidObject:
176-
_HASH = hash("_LaTeXTools_InvalidObject")
174+
__slots__ = []
175+
__hash = hash("_LaTeXTools_InvalidObject")
177176

178-
def __eq__(self, other):
177+
@classmethod
178+
def __hash__(cls):
179+
return cls.__hash
180+
181+
@classmethod
182+
def __eq__(cls, other):
179183
# in general, this is a bad pattern, since it will treat the
180184
# literal string "_LaTeXTools_InvalidObject" as being an invalid
181185
# object; nevertheless, we need an object identity that persists
182186
# across reloads, and this seems to be the only way to guarantee
183187
# that
184-
return self._HASH == hash(other)
185-
186-
def __ne__(self, other):
187-
return not self == other
188-
189-
def __hash__(self):
190-
return self._HASH
191-
188+
try:
189+
return cls.__hash == hash(other)
190+
except TypeError:
191+
return False
192192

193-
try:
194-
_invalid_object
195-
except NameError:
196-
_invalid_object = InvalidObject()
193+
@classmethod
194+
def __ne__(cls, other):
195+
return not cls == other
197196

198197

199198
class Cache:
@@ -216,14 +215,12 @@ def __init__(self):
216215
self._disk_lock = threading.Lock()
217216
if not hasattr(self, "_write_lock"):
218217
self._write_lock = threading.Lock()
219-
if not hasattr(self, "_save_lock"):
220-
self._save_lock = threading.Lock()
221218
if not hasattr(self, "_objects"):
222219
self._objects = {}
223220
if not hasattr(self, "_dirty"):
224221
self._dirty = False
225222
if not hasattr(self, "_save_queue"):
226-
self._save_queue = []
223+
self._save_queue = 0
227224
if not hasattr(self, "_pool"):
228225
self._pool = ThreadPool(2)
229226

@@ -247,7 +244,7 @@ def get(self, key):
247244
# note: will raise CacheMiss if can't be found
248245
result = self.load(key)
249246

250-
if result == _invalid_object:
247+
if result == InvalidObject:
251248
raise CacheMiss("{0} is invalid".format(key))
252249

253250
# return a copy of any objects
@@ -269,7 +266,7 @@ def has(self, key):
269266
if key is None:
270267
raise ValueError("key cannot be None")
271268

272-
return key in self._objects and self._objects[key] != _invalid_object
269+
return key in self._objects and self._objects[key] != InvalidObject
273270

274271
def set(self, key, obj):
275272
"""
@@ -284,11 +281,6 @@ def set(self, key, obj):
284281
if key is None:
285282
raise ValueError("key cannot be None")
286283

287-
try:
288-
pickle.dumps(obj, protocol=-1)
289-
except pickle.PicklingError:
290-
raise ValueError("obj must be picklable")
291-
292284
if isinstance(obj, list):
293285
obj = tuple(obj)
294286
elif isinstance(obj, dict):
@@ -336,7 +328,7 @@ def invalidate(self, key=None):
336328

337329
def _invalidate(key):
338330
try:
339-
self._objects[key] = _invalid_object
331+
self._objects[key] = InvalidObject
340332
except Exception:
341333
logger.error("error occurred while invalidating %s", key)
342334
traceback.print_exc()
@@ -412,12 +404,12 @@ def save(self, key=None):
412404
with self._disk_lock:
413405
# operate on a stable copy of the object
414406
with self._write_lock:
415-
_objs = pickle.loads(pickle.dumps(self._objects, protocol=-1))
407+
_objs = self._objects.copy()
416408
self._dirty = False
417409

418410
if key is None:
419411
# remove all InvalidObjects
420-
delete_keys = [k for k in _objs if _objs[k] == _invalid_object]
412+
delete_keys = [k for k in _objs if _objs[k] == InvalidObject]
421413

422414
for k in delete_keys:
423415
del _objs[k]
@@ -442,7 +434,7 @@ def save(self, key=None):
442434
logger.error("error while deleting %s: %s", self.cache_path, e)
443435

444436
elif key in _objs:
445-
if _objs[key] == _invalid_object:
437+
if _objs[key] == InvalidObject:
446438
file_path = os.path.join(self.cache_path, key)
447439
try:
448440
os.remove(file_path)
@@ -475,17 +467,16 @@ def _write(self, key, obj):
475467
raise CacheMiss()
476468

477469
def _schedule_save(self):
478-
with self._save_lock:
479-
self._save_queue.append(0)
480-
threading.Timer(0.5, self._debounce_save).start()
481-
482-
def _debounce_save(self):
483-
with self._save_lock:
484-
if len(self._save_queue) > 1:
485-
self._save_queue.pop()
470+
def _debounce():
471+
self._save_queue -= 1
472+
if self._save_queue > 0:
473+
sublime.set_timeout(_debounce, 1000)
486474
else:
487-
self._save_queue = []
488-
sublime.set_timeout(self.save_async, 0)
475+
self._save_queue = 0
476+
self.save_async()
477+
478+
self._save_queue += 1
479+
sublime.set_timeout(_debounce, 1000)
489480

490481
# ensure cache is saved to disk when removed from memory
491482
def __del__(self):
@@ -561,8 +552,6 @@ def get(self, key):
561552

562553
return super(ValidatingCache, self).get(key)
563554

564-
get.__doc__ = Cache.get.__doc__
565-
566555
def set(self, key, obj):
567556
if key is None:
568557
raise ValueError("key cannot be None")
@@ -571,8 +560,6 @@ def set(self, key, obj):
571560

572561
return super(ValidatingCache, self).set(key, obj)
573562

574-
set.__doc__ = Cache.set.__doc__
575-
576563

577564
class InstanceTrackingCache(Cache):
578565
"""

0 commit comments

Comments
 (0)