smbinterp/interp/grid/__init__.py


import sys
from collections import defaultdict
import pickle
from xml.dom.minidom import Document
import numpy as np
from scipy.spatial import KDTree
from interp.baker import run_baker
import interp.grid.simplex
import logging
log = logging.getLogger("interp")
MAX_SEARCH_COUNT = 256
class grid(object):
def __init__(self, verts = None, q = None):
"""
verts = array of arrays (if passed in, will convert to numpy.array)
[
[x0,y0],
[x1,y1], ...
]
q = array (1D) of physical values
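
        a small usage sketch (the coordinates and q values are made up for
        illustration):

        >>> g = grid([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]], [1.0, 2.0, 3.0])
        >>> len(g.verts)
        3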
"""
        if verts is not None:
            self.verts = np.array(verts)
            # infer the spatial dimension from the verts; self.dim is
            # needed later by get_simplex_and_nearest_points
            self.dim = self.verts.shape[1]
            self.tree = KDTree(self.verts)
        if q is not None:
            self.q = np.array(q)
self.cells = {}
self.cells_for_vert = defaultdict(list)
    def get_containing_simplex(self, X):
        """
        find the simplex containing the point X by walking cell neighbors,
        starting from the cells attached to the vert nearest X; returns the
        simplex as a new grid object (via create_mesh)
        """
        if not self.cells:
            raise Exception("cell connectivity is not set up")
        # get the closest point to X
        (dist, indices) = self.tree.query(X, 2)
        closest_point = indices[0]
        log.debug('X: %s' % X)
        log.debug('point index: %d' % closest_point)
        log.debug('actual point: %s' % self.verts[closest_point])
        log.debug('distance = %0.4f' % dist[0])
simplex = None
checked_cells = []
cells_to_check = list(self.cells_for_vert[closest_point])
attempts = 0
while not simplex and cells_to_check:
attempts += 1
if attempts > MAX_SEARCH_COUNT:
raise Exception("Is the search becoming exhaustive? (%d attempts)" % attempts)
cur_cell = cells_to_check.pop(0)
checked_cells.append(cur_cell)
if cur_cell.contains(X, self):
simplex = cur_cell
continue
for neighbor in cur_cell.neighbors:
if (neighbor not in checked_cells) and (neighbor not in cells_to_check):
cells_to_check.append(neighbor)
if not simplex:
raise Exception('no containing simplex found')
log.debug("simplex vert indicies: %s" % simplex.verts)
R = self.create_mesh(simplex.verts)
log.debug("R:\n%s", R)
log.debug('total attempts before finding simplex: %d' % attempts)
return R
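    # Example (a hedged sketch): get_containing_simplex expects the cell
    # connectivity to be populated first.  Roughly, given a grid g, each
    # cell object is stored in g.cells and listed against every vert it
    # touches, e.g.:
    #
    #     c = cell('c0')
    #     for i in (0, 1, 2):
    #         c.add_vert(i)
    #     g.cells[c.name] = c
    #     for i in c.verts:
    #         g.cells_for_vert[i].append(c)
    #     R = g.get_containing_simplex([0.25, 0.25])
    #
    # the name 'c0' and the vert indices above are purely illustrative, and
    # neighbor links (cell.add_neighbor) drive the walk when the first guess
    # does not contain X; the real connectivity is presumably built
    # elsewhere (e.g. from qdelaunay output, see for_qhull below).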
    def create_mesh(self, indices):
        """
        take a list of indices into this grid's verts and return a new grid
        object built from the corresponding verts and q values.

        note: the input is indices; the returned grid contains actual verts
        """
        p = [self.verts[i] for i in indices]
        q = [self.q[i] for i in indices]
        return grid(p, q)
    def get_simplex_and_nearest_points(self, X, extra_points=3):
        """
        return two grid objects: R and S.

        R is a grid object holding the (supposed) containing simplex around
        the point X.

        S is S_j from Baker's paper: some nearby verts that are not part of
        the simplex.
        """
simplex_size = self.dim + 1
log.debug("extra verts: %d" % extra_points)
log.debug("simplex size: %d" % simplex_size)
r_mesh = self.get_containing_simplex(X)
        # and some UNIQUE extra verts (not already part of R)
        (dist, indices) = self.tree.query(X, simplex_size + extra_points)
        log.debug("extra indices: %s" % indices)
        unique_indices = []
        for index in indices:
            close_point_in_R = False
            for rvert in r_mesh.verts:
                if all(rvert == self.verts[index]):
                    close_point_in_R = True
                    break
            if not close_point_in_R:
                unique_indices.append(index)
            else:
                log.debug('throwing out %s: %s' % (index, self.verts[index]))
        log.debug("indices: %s" % indices)
        log.debug("unique indices: %s" % unique_indices)
        s_mesh = self.create_mesh(unique_indices)
return (r_mesh, s_mesh)
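    # Example (sketch): R and S are themselves grid objects; for a fully
    # connected grid g,
    #
    #     (R, S) = g.get_simplex_and_nearest_points([0.5, 0.5])
    #
    # where R holds the dim + 1 verts of the containing simplex and S holds
    # the requested extra nearby verts that are not already in R.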
    def run_baker(self, X, extra_points=3, order=2):
        """
        interpolate the value at the point X via interp.baker.run_baker,
        using the containing simplex R and the extra nearby points S
        """
        (R, S) = self.get_simplex_and_nearest_points(X, extra_points)
        answer = run_baker(X, R, S, order)
        return answer
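    # Example (sketch): with connectivity in place, interpolation at a point
    # reduces to a single call; extra_points and order are passed straight
    # through to interp.baker.run_baker:
    #
    #     answer = g.run_baker([0.5, 0.5], extra_points=3, order=2)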
    def for_qhull_generator(self):
        """
        return a generator of lines that should be fed into qdelaunay
        """
        yield str(len(self.verts[0]))
        yield '%d' % len(self.verts)
        for p in self.verts:
            yield " ".join("%f" % i for i in p)
def for_qhull(self):
"""
this returns a single string that should be fed into qdelaunay
"""
r = '%d\n' % len(self.verts[0])
r += '%d\n' % len(self.verts)
for p in self.verts:
r += "%s\n" % " ".join("%f" % i for i in p)
return r
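    # Example (sketch): for three 2D verts at (0, 0), (1, 0) and (0, 1),
    # for_qhull() returns a string along the lines of
    #
    #     2
    #     3
    #     0.000000 0.000000
    #     1.000000 0.000000
    #     0.000000 1.000000
    #
    # which can be piped to the qdelaunay binary (e.g. "qdelaunay Qt i") to
    # recover the simplex connectivity.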
def __str__(self):
r = ''
assert( len(self.verts) == len(self.q) )
for c, i in enumerate(zip(self.verts, self.q)):
r += "%d vert(%s): q(%0.4f)" % (c,i[0], i[1])
cell_str = ", ".join([str(f.name) for f in self.cells_for_vert[c]])
r += " cells: [%s]" % cell_str
r += "\n"
if self.cells:
            for v in self.cells.values():
r += "%s\n" % v
return r
def normalize_q(self, new_max = 0.1):
largest_number = np.max(np.abs(self.q))
self.q *= new_max/largest_number
    def dump_to_blender_files(self, pfile='/tmp/points.p', cfile='/tmp/cells.p'):
        # pickle requires binary mode; 2D points are padded with z = 0.0
        if len(self.verts[0]) == 2:
            points = [(p[0], p[1], 0.0) for p in self.verts]
        else:
            points = [(p[0], p[1], p[2]) for p in self.verts]
        pickle.dump(points, open(pfile, 'wb'))
        pickle.dump([f.verts for f in self.cells.values()], open(cfile, 'wb'))
def get_xml(self):
doc = Document()
ps = doc.createElement("points")
doc.appendChild(ps)
for i in zip(self.verts, self.q):
p = doc.createElement("point")
p.setAttribute("x", str(i[0][0]))
p.setAttribute('y', str(i[0][1]))
p.setAttribute('z', str(i[0][2]))
p.setAttribute('q', str(i[1] ))
ps.appendChild(p)
return doc
def toxml(self):
return self.get_xml().toxml()
def toprettyxml(self):
return self.get_xml().toprettyxml()
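    # Example (sketch): for a single 3D vert at the origin with q = 1.0,
    # toprettyxml() produces roughly (whitespace and attribute order may
    # vary by Python version)
    #
    #     <?xml version="1.0" ?>
    #     <points>
    #         <point x="0.0" y="0.0" z="0.0" q="1.0"/>
    #     </points>
    #
    # note that get_xml assumes 3D verts (it reads i[0][2]).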
class cell(object):
def __init__(self, name):
self.name = name
self.verts = []
self.neighbors = []
def add_vert(self, v):
"""
v should be an index into grid.verts
"""
self.verts.append(v)
    def add_neighbor(self, n):
        """
        n should be a reference to another (adjacent) cell object
        """
        self.neighbors.append(n)
    def contains(self, X, G):
        """
        X = point of interest
        G = corresponding grid object (provides G.verts)

        because a cell only stores vert indices, a reference to the grid
        object holding the real verts must be passed in; this simply
        delegates to interp.grid.simplex.contains.
        """
        return interp.grid.simplex.contains(X, [G.verts[i] for i in self.verts])
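    # Example (sketch): with g a grid and c a cell whose verts index into
    # g.verts,
    #
    #     inside = c.contains([0.25, 0.25], g)
    #
    # is True exactly when the point lies inside the simplex spanned by
    # those verts (the test itself lives in interp.grid.simplex.contains).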
    def __str__(self):
        return '<cell %s: verts: %s neighbor count: %d>' % (
            self.name,
            self.verts,
            len(self.neighbors),
        )
__repr__ = __str__