The following are 29 code examples extracted from open-source Python projects, illustrating how to use theano.tensor.Apply().
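Before the extracted examples, a minimal sketch of where `Apply` nodes come from may help (assuming the Theano 0.x API, in which `theano.tensor` re-exports `Apply`): every symbolic result records the operation that produced it as an `Apply` node on its `.owner` attribute.

import theano.tensor as T

# Minimal sketch (Theano 0.x API): every application of an Op in a
# Theano graph is recorded as an Apply node, reachable from any result
# Variable through its `.owner` attribute.
x = T.vector('x')
y = x * 2 + 1

apply_node = y.owner
print(isinstance(apply_node, T.Apply))  # True
print(apply_node.op)       # the Op that produced `y` (an elementwise add)
print(apply_node.inputs)   # the Variables that Op was applied to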
import theano.tensor as tensor

def param_search(node, critereon):
    """
    Traverse the Theano graph starting at `node` and return a list of all
    nodes which match the `critereon` function. When optimizing a cost
    function, you can use this to get a list of all of the trainable params
    in the graph, like so:

    `lib.search(cost, lambda x: hasattr(x, "param"))`
    """
    def _search(node, critereon, visited):
        if node in visited:
            return []
        visited.add(node)

        results = []
        if isinstance(node, tensor.Apply):
            # Apply node: recurse into each input of the operation
            for inp in node.inputs:
                results += _search(inp, critereon, visited)
        else:  # Variable node
            if critereon(node):
                results.append(node)
            if node.owner is not None:
                results += _search(node.owner, critereon, visited)
        return results

    return _search(node, critereon, set())
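A hypothetical usage sketch: the `param` attribute convention comes from the docstring above; the shared variable and cost here are invented for illustration.

import numpy as np
import theano
import theano.tensor as T

# Tag a trainable shared variable with a `param` attribute, as the
# docstring's predicate expects (hypothetical convention, per the docstring).
W = theano.shared(np.zeros((3, 3), dtype='float32'), name='W')
W.param = True

x = T.matrix('x')
cost = T.sum(T.dot(x, W) ** 2)

params = param_search(cost, lambda v: hasattr(v, 'param'))
print(params)  # [W]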
import numpy as np
from numpy.lib.stride_tricks import as_strided

def sine_window(X):
    """
    Apply a sinusoid window to X.

    Parameters
    ----------
    X : ndarray, shape=(n_samples, n_features)
        Input array of samples

    Returns
    -------
    X_windowed : ndarray, shape=(n_samples, n_features)
        Windowed version of X.
    """
    i = np.arange(X.shape[1])
    win = np.sin(np.pi * (i + 0.5) / X.shape[1])
    # Broadcast the 1D window across every row without copying: a row
    # stride of 0 repeats the same window for each sample.
    row_stride = 0
    col_stride = win.itemsize
    strided_win = as_strided(win, shape=X.shape,
                             strides=(row_stride, col_stride))
    return X * strided_win
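A quick demonstration on random data, assuming `sine_window` as defined above:

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(4, 8)
X_windowed = sine_window(X)

print(X_windowed.shape)  # (4, 8)
# Each row is scaled by the same half-period sine window:
# small near the edges, close to 1 in the middle.
print(np.sin(np.pi * (np.arange(8) + 0.5) / 8))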
import theano.tensor as T

def search(node, critereon):
    """
    Traverse the Theano graph starting at `node` and return a list of all
    nodes which match the `critereon` function. When optimizing a cost
    function, you can use this to get a list of all of the trainable params
    in the graph, like so:

    `lib.search(cost, lambda x: hasattr(x, "param"))`
    or
    `lib.search(cost, lambda x: hasattr(x, "param") and x.param==True)`
    """
    def _search(node, critereon, visited):
        if node in visited:
            return []
        visited.add(node)

        results = []
        if isinstance(node, T.Apply):
            # Apply node: recurse into each input of the operation
            for inp in node.inputs:
                results += _search(inp, critereon, visited)
        else:  # Variable node
            if critereon(node):
                results.append(node)
            if node.owner is not None:
                results += _search(node.owner, critereon, visited)
        return results

    return _search(node, critereon, set())
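A sketch of the second, stricter predicate from the docstring (again hypothetical: `W`, `b`, and the `param` tagging are invented for illustration), which skips variables whose `param` attribute is present but falsy:

import numpy as np
import theano
import theano.tensor as T

# Hypothetical setup: `b` carries a `param` attribute but is tagged False
# (frozen), so only `W` passes the stricter predicate.
W = theano.shared(np.zeros((3, 3), dtype='float32'), name='W')
W.param = True
b = theano.shared(np.zeros(3, dtype='float32'), name='b')
b.param = False

x = T.matrix('x')
cost = T.sum(T.dot(x, W) + b)

trainable = search(cost, lambda v: hasattr(v, 'param') and v.param == True)
print(trainable)  # [W]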
import theano.tensor as T

def search(node, critereon):
    """
    Traverse the Theano graph starting at `node` and return a list of all
    nodes which match the `critereon` function. When optimizing a cost
    function, you can use this to get a list of all of the trainable params
    in the graph, like so:

    `lib.search(cost, lambda x: hasattr(x, "param"))`
    """
    def _search(node, critereon, visited):
        if node in visited:
            return []
        visited.add(node)

        results = []
        if isinstance(node, T.Apply):
            # Apply node: recurse into each input of the operation
            for inp in node.inputs:
                results += _search(inp, critereon, visited)
        else:  # Variable node
            if critereon(node):
                results.append(node)
            if node.owner is not None:
                results += _search(node.owner, critereon, visited)
        return results

    return _search(node, critereon, set())