import numpy as np
import copy
try:
from functools import lru_cache # For Python 3 only
except ImportError: # Python 2:
import time
import functools
import collections
# Note: to avoid depending on external packages such as functools32,
# this Python 2 fallback uses only the standard library
def lru_cache(maxsize=255, timeout=None):
"""
Thanks to ilialuk @ https://stackoverflow.com/users/2121105/ilialuk for
this code snippet. Modifications by S. Endres
"""
class LruCacheClass(object):
def __init__(self, input_func, max_size, timeout):
self._input_func = input_func
self._max_size = max_size
self._timeout = timeout
# This will store the cache for this function,
# format - {caller1 : [OrderedDict1, last_refresh_time1],
# caller2 : [OrderedDict2, last_refresh_time2]}.
# In case of an instance method - the caller is the instance,
# in case called from a regular function - the caller is None.
self._caches_dict = {}
def cache_clear(self, caller=None):
# If the caller already has a cache, discard it and start a fresh one:
if caller in self._caches_dict:
del self._caches_dict[caller]
self._caches_dict[caller] = [collections.OrderedDict(),
time.time()]
def __get__(self, obj, objtype):
""" Called for instance methods """
return_func = functools.partial(self._cache_wrapper, obj)
return_func.cache_clear = functools.partial(self.cache_clear,
obj)
# Wrap the returned function with functools.wraps to preserve the
# docstring and the name of the original function:
return functools.wraps(self._input_func)(return_func)
def __call__(self, *args, **kwargs):
""" Called for regular functions """
return self._cache_wrapper(None, *args, **kwargs)
# Set the cache_clear function in the __call__ operator:
__call__.cache_clear = cache_clear
def _cache_wrapper(self, caller, *args, **kwargs):
# Create a unique key including the types (in order to
# differentiate between 1 and '1'):
kwargs_key = "".join(map(
lambda x: str(x) + str(type(kwargs[x])) + str(kwargs[x]),
sorted(kwargs)))
key = "".join(
map(lambda x: str(type(x)) + str(x), args)) + kwargs_key
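# e.g. on Python 2 a call f(1, a='1') produces a key roughly like
# "<type 'int'>1a<type 'str'>1", so it cannot collide with the
# key produced by f('1', a=1).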
# Check if caller exists, if not create one:
if caller not in self._caches_dict:
self._caches_dict[caller] = [collections.OrderedDict(),
time.time()]
else:
# Clear the cache if the refresh timeout has elapsed:
if self._timeout is not None:
if (time.time() - self._caches_dict[caller][1]
> self._timeout):
self.cache_clear(caller)
# Check if the key exists, if so - return it:
cur_caller_cache_dict = self._caches_dict[caller][0]
if key in cur_caller_cache_dict:
return cur_caller_cache_dict[key]
# Make sure the cache does not exceed max_size:
if len(cur_caller_cache_dict) >= self._max_size:
# Evict the oldest entry (eviction is FIFO rather than true LRU,
# since cache hits do not re-order entries):
try:
cur_caller_cache_dict.popitem(False)
except KeyError:
pass
# Call the function and store the result in the cache (prepend the
# caller so that instance methods receive their instance):
if caller is not None:
args = (caller,) + args
cur_caller_cache_dict[key] = self._input_func(*args, **kwargs)
return cur_caller_cache_dict[key]
# Return the decorator wrapping the class (also wraps the instance to
# maintain the docstring and the name of the original function):
return (lambda input_func: functools.wraps(input_func)(
LruCacheClass(input_func, maxsize, timeout)))
class Complex:
def __init__(self, dim, func, func_args=(), symmetry=False, bounds=None,
g_cons=None, g_args=()):
self.dim = dim
self.bounds = bounds
self.symmetry = symmetry # TODO: Define the functions to be used
# here in __init__ to avoid repeated `if` checks
self.gen = 0
self.perm_cycle = 0
# Every cell is stored in the list for its generation,
# e.g. the initial cell is stored in self.H[0],
# first-generation cells are stored in self.H[1], etc.
# When a cell is subgenerated it is removed from this list
self.H = [] # Storage structure of cells
# Cache of all vertices
self.V = VertexCache(func, func_args, bounds, g_cons, g_args)
# Generate n-cube here:
self.n_cube(dim, symmetry=symmetry)
# TODO: Assign functions to the complex instead
if symmetry:
self.generation_cycle = 1
# self.centroid = self.C0()[-1].x
# self.C0.centroid = self.centroid
else:
self.add_centroid()
self.H.append([])
self.H[0].append(self.C0)
self.hgr = self.C0.homology_group_rank()
self.hgrd = 0 # Complex group rank differential
# self.hgr = self.C0.hg_n
# Build initial graph
self.graph_map()
self.performance = []
self.performance.append(0)
self.performance.append(0)
def __call__(self):
return self.H
def n_cube(self, dim, symmetry=False, printout=False):
"""
Generate the simplicial triangulation of the n-dimensional hypercube
containing 2**n vertices
"""
origin = list(np.zeros(dim, dtype=int))
self.origin = origin
supremum = list(np.ones(dim, dtype=int))
self.supremum = supremum
# tuple versions for indexing
origintuple = tuple(origin)
supremumtuple = tuple(supremum)
x_parents = [origintuple]
if symmetry:
self.C0 = Simplex(0, 0, 0, self.dim) # Initial cell object
self.C0.add_vertex(self.V[origintuple])
i_s = 0
self.perm_symmetry(i_s, x_parents, origin)
self.C0.add_vertex(self.V[supremumtuple])
else:
self.C0 = Cell(0, 0, origin, supremum) # Initial cell object
self.C0.add_vertex(self.V[origintuple])
self.C0.add_vertex(self.V[supremumtuple])
i_parents = []
self.perm(i_parents, x_parents, origin)
if printout:
print("Initial hyper cube:")
for v in self.C0():
v.print_out()
def perm(self, i_parents, x_parents, xi):
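# Recursively flip each remaining coordinate of `xi` to 1, so that
# every vertex of the hypercube is added exactly once and connected
# to its parent vertices along coordinate-permutation paths (a
# Kuhn/Freudenthal-style triangulation).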
# TODO: Cut out of the loop early if outside the linear constraint cutting planes
xi_t = tuple(xi)
# Construct required iterator
iter_range = [x for x in range(self.dim) if x not in i_parents]
for i in iter_range:
i2_parents = copy.copy(i_parents)
i2_parents.append(i)
xi2 = copy.copy(xi)
xi2[i] = 1
# Make new vertex list a hashable tuple
xi2_t = tuple(xi2)
# Append to cell
self.C0.add_vertex(self.V[xi2_t])
# Connect neighbours and vice versa
# Parent point
self.V[xi2_t].connect(self.V[xi_t])
# Connect all family of simplices in parent containers
for x_ip in x_parents:
self.V[xi2_t].connect(self.V[x_ip])
x_parents2 = copy.copy(x_parents)
x_parents2.append(xi_t)
# Permute the remaining coordinates
self.perm(i2_parents, x_parents2, xi2)
def perm_symmetry(self, i_s, x_parents, xi):
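# Symmetric variant of perm(): flips the coordinates in one fixed
# order (i_s = 0, 1, ..., dim - 1), generating only a single
# permutation path from the origin to the supremum.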
# TODO: Cut out of the loop early if outside the linear constraint cutting planes
xi_t = tuple(xi)
xi2 = copy.copy(xi)
xi2[i_s] = 1
# Make new vertex list a hashable tuple
xi2_t = tuple(xi2)
# Append to cell
self.C0.add_vertex(self.V[xi2_t])
# Connect neighbours and vice versa
# Parent point
self.V[xi2_t].connect(self.V[xi_t])
# Connect all family of simplices in parent containers
for x_ip in x_parents:
self.V[xi2_t].connect(self.V[x_ip])
x_parents2 = copy.copy(x_parents)
x_parents2.append(xi_t)
i_s += 1
if i_s == self.dim:
return
# Permute the next coordinate
self.perm_symmetry(i_s, x_parents2, xi2)
def add_centroid(self):
"""Split the central edge between the origin and supremum of
a cell and add the new vertex to the complex"""
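# For example, on the unit square the centroid is (0.5, 0.5); the
# diagonal edge (0, 0)-(1, 1) is removed and (0.5, 0.5) is connected
# to all four corners instead.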
self.centroid = list(
(np.array(self.origin) + np.array(self.supremum)) / 2.0)
self.C0.add_vertex(self.V[tuple(self.centroid)])
self.C0.centroid = self.centroid
# Disconnect origin and supremum
self.V[tuple(self.origin)].disconnect(self.V[tuple(self.supremum)])
# Connect centroid to all other vertices
for v in self.C0():
self.V[tuple(self.centroid)].connect(self.V[tuple(v.x)])
self.centroid_added = True
return
# Construct incidence array:
def incidence(self):
if self.centroid_added:
self.structure = np.zeros([2 ** self.dim + 1, 2 ** self.dim + 1],
dtype=int)
else:
self.structure = np.zeros([2 ** self.dim, 2 ** self.dim],
dtype=int)
for v in self.C0():
for v2 in v.nn:
self.structure[v.index, v2.index] = 1
return
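# Sketch of the expected output for dim=1 with the centroid added,
# assuming the vertexes (0,), (1,) and (0.5,) have indexes 0, 1, 2
# (insertion order), so the only edges are those to the centroid:
#     [[0, 0, 1],
#      [0, 0, 1],
#      [1, 1, 0]]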
# A more sparse incidence generator:
def graph_map(self):
""" Make a list of size 2**n + 1 where an entry is a vertex
incidence, each list element contains a list of indexes
corresponding to that entries neighbours"""
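# e.g. for the dim=1 complex above (vertexes (0,), (1,), (0.5,) at
# indexes 0, 1 and 2) this gives self.graph = [[2], [2], [0, 1]].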
self.graph = [[v2.index for v2 in v.nn] for v in self.C0()]
# Graph structure method:
# 0. Capture the indices of the initial cell.
# 1. Generate new origin and supremum scalars based on current generation
# 2. Generate a new set of vertices corresponding to a new
# "origin" and "supremum"
# 3. Connect based on the indices of the previous graph structure
# 4. Disconnect the edges in the original cell
def sub_generate_cell(self, C_i, gen):
"""Subgenerate a cell `C_i` of generation `gen` and
homology group rank `hgr`."""
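# Sketch of one subdivision step for dim=2: the centroid (0.5, 0.5)
# of C_i becomes the shared origin of four new subcubes, one per
# extreme vertex of C_i (e.g. the subcube spanning
# (0.5, 0.5)-(1, 1)); the old edges of C_i are then disconnected.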
origin_new = tuple(C_i.centroid)
centroid_index = len(C_i()) - 1
# If there is no storage list for this generation yet, append one:
try:
self.H[gen]
except IndexError:
self.H.append([])
# Generate subcubes using every extreme vertex in C_i as a supremum
# and the centroid of C_i as the origin
H_new = [] # list storing all the new cubes split from C_i
for i, v in enumerate(C_i()[:-1]):
supremum = tuple(v.x)
H_new.append(
self.construct_hypercube(origin_new, supremum, gen, C_i.hg_n))
for i, connections in enumerate(self.graph):
# Disconnect the old edges of vertex C_i()[i]; the new subcubes
# constructed above supply the new connections:
if i == centroid_index: # Stop once the centroid is reached
break
for j in connections:
C_i()[i].disconnect(C_i()[j])
# Destroy the old cell
if C_i is not self.C0: # Not strictly needed; the garbage collector handles this
del C_i
# TODO: Recalculate all the homology group ranks of each cell
return H_new
def split_generation(self):
"""
Run sub_generate_cell for every cell in the current generation self.gen
"""
no_splits = False # USED IN SHGO
try:
for c in self.H[self.gen]:
if self.symmetry:
# self.sub_generate_cell_symmetry(c, self.gen + 1)
self.split_simplex_symmetry(c, self.gen + 1)
else:
self.sub_generate_cell(c, self.gen + 1)
except IndexError:
no_splits = True # USED IN SHGO
self.gen += 1
return no_splits # USED IN SHGO
# @lru_cache(maxsize=None)
def construct_hypercube(self, origin, supremum, gen, hgr,
printout=False):
"""
Build a hypercube with triangulations symmetric to C0.
"""