Add compatibility with Python 3.8
fferflo committed Jan 17, 2024
1 parent 2b50e05 commit f8cc37f
Showing 3 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions einx/backend/tracer.py
@@ -352,10 +352,10 @@ def reduce(tensor, axis, *, op=None, **kwargs):
     else:
         for a in reversed(sorted(axes)):
             del shape[a]
-    return Op(op, args=[tensor], kwargs=kwargs | {"axis": axis}, output_shapes=np.asarray(shape)).output_tracers
+    return Op(op, args=[tensor], kwargs={**kwargs, **{"axis": axis}}, output_shapes=np.asarray(shape)).output_tracers

 def map(tensor, axis, op, *args, **kwargs):
-    return Op(op, args=[tensor], kwargs=kwargs | {"axis": axis}, output_shapes=np.asarray(tensor.shape)).output_tracers
+    return Op(op, args=[tensor], kwargs={**kwargs, **{"axis": axis}}, output_shapes=np.asarray(tensor.shape)).output_tracers

 def index(tensor, coordinates, update=None, op=None):
     return Op(op, args=[tensor, coordinates, update], output_shapes=np.asarray(coordinates[0].shape)).output_tracers
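For context on these two hunks: the dict union operator (`kwargs | {...}`, PEP 584) only exists on Python 3.9+, while merging via `**`-unpacking (PEP 448) has worked since Python 3.5, making it the 3.8-compatible spelling. A minimal standalone sketch of the equivalence (illustrative, not einx code):

    kwargs = {"keepdims": True}

    # Python 3.9+ only: the PEP 584 dict union operator
    # merged = kwargs | {"axis": 0}

    # Python 3.8-compatible: PEP 448 dict unpacking, same result
    merged = {**kwargs, **{"axis": 0}}  # equivalent to {**kwargs, "axis": 0}

    assert merged == {"keepdims": True, "axis": 0}

Both forms leave `kwargs` unmodified and let the right-hand operand win on duplicate keys.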
2 changes: 1 addition & 1 deletion einx/lru_cache.py
@@ -34,7 +34,7 @@ def lru_cache(func=None, trace=None):
         if max_cache_size == 0:
             inner = func
         elif max_cache_size < 0:
-            inner = freeze(functools.cache(func))  # No cache limit
+            inner = freeze(functools.lru_cache(maxsize=None)(func))  # No cache limit
         else:
             inner = freeze(functools.lru_cache(maxsize=max_cache_size)(func))
     else:
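`functools.cache` was added in Python 3.9 and is documented as equivalent to `lru_cache(maxsize=None)`, so this substitution preserves behavior while restoring Python 3.8 support. A small sketch with a toy function (`fib` is illustrative, not from einx):

    import functools

    # Python 3.8-compatible spelling of an unbounded memoization cache;
    # on 3.9+ this is exactly what @functools.cache is shorthand for.
    @functools.lru_cache(maxsize=None)
    def fib(n: int) -> int:
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    assert fib(30) == 832040
    assert fib.cache_info().hits > 0  # lru_cache also exposes cache statistics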
2 changes: 1 addition & 1 deletion einx/param.py
@@ -30,7 +30,7 @@ def instantiate(x, shape, backend, **kwargs):
         raise TypeError("instantiate cannot be called on None")
     if backend == einx.backend.tracer:
         if is_tensor_factory(x):
-            return einx.backend.tracer.Op(instantiate, [x], {"shape": shape} | kwargs, output_shapes=np.asarray(shape), pass_backend=True).output_tracers
+            return einx.backend.tracer.Op(instantiate, [x], {**{"shape": shape}, **kwargs}, output_shapes=np.asarray(shape), pass_backend=True).output_tracers
         else:
             return einx.backend.tracer.Op("to_tensor", [x], output_shapes=np.asarray(shape)).output_tracers
     else:
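One subtlety worth noting: in both spellings the right-hand operand wins on duplicate keys, so a "shape" entry passed via `kwargs` would still override the default here, just as with `{"shape": shape} | kwargs` on 3.9+. A quick standalone check (values are made up for illustration):

    defaults = {"shape": (2, 3)}
    overrides = {"shape": (4, 5), "dtype": "float32"}

    # Later entries win, matching the semantics of `defaults | overrides` on 3.9+
    assert {**defaults, **overrides} == {"shape": (4, 5), "dtype": "float32"}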
