
Commit 1910009 (parent: c69835a)

Change softmax iterator, also make example compile

4 files changed (18 additions, 15 deletions)

examples/ex06_shakespeare_generator.nim (1 addition, 1 deletion)

@@ -56,7 +56,7 @@ const
 #
 # ################################################################

-func strToTensor(str: string|TaintedString): Tensor[PrintableIdx] =
+proc strToTensor(str: string|TaintedString): Tensor[PrintableIdx] =
   result = newTensor[PrintableIdx](str.len)

   # For each x in result, map the corresponding char index
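
A likely reason for the func -> proc change (an assumption on my part; the commit message does not say): `func` is Nim shorthand for a proc with the `{.noSideEffect.}` pragma, and the body of `strToTensor` presumably fails the compiler's side-effect check, for example by consulting a lookup table kept in global mutable state. A minimal sketch of the rule, with a hypothetical `charIndex` global that is not part of the example file:

var charIndex: array[char, int]  # hypothetical global lookup table

func doubled(x: int): int =
  x * 2             # fine: pure computation, `func` compiles

proc indexOf(c: char): int =
  charIndex[c]      # reads a global `var`, which Nim counts as a
                    # side effect, so this must be a `proc`

echo doubled(3)     # 6
echo indexOf('a')   # 0 (the array is zero-initialised)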

src/arraymancer/laser/strided_iteration/foreach_common.nim (11 additions, 10 deletions)

@@ -46,7 +46,6 @@ proc aliasTensor(id: int, tensor: NimNode): tuple[alias: NimNode, isVar: NimNode
   for i in 2 ..< tensor.len:
     t.add tensor[i]

-  var alias = ""
   let isVar = block:
     # Handle slicing cases like foo[0..<1, 0..<2]
     # that do not return `var` but are technically `var`
@@ -57,16 +56,18 @@ proc aliasTensor(id: int, tensor: NimNode): tuple[alias: NimNode, isVar: NimNode
     else:
       quote do: isVar(`t`)

-  while t.kind in {nnkDotExpr, nnkBracketExpr}:
-    if t[0].kind notin {nnkIdent, nnkSym}:
-      error "Expected a field name but found \"" & t[0].repr()
-    alias.add $t[0]
-    if t.kind == nnkBracketExpr and validIdentifier(t[1].repr()):
-      alias.add $t[1]
-      alias.add "_"
-    t = t[1]
+  proc flattenedName(node: NimNode): string =
+    if node.kind in {nnkDotExpr, nnkBracketExpr}:
+      result = flattenedName(node[0])
+      result &= '_'
+      result &= flattenedName(node[1])
+    elif node.kind in {nnkIdent, nnkSym}:
+      result = $node
+    else:
+      error "Expected a field name or dot expression or array access but found \"" &
+        node.repr()

-  alias &= $t
+  let alias = flattenedName(t)

   return (newIdentNode($alias & "_alias" & $id & '_'), isVar)
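
The new `flattenedName` replaces the iterative while loop with a direct recursion over dot and bracket expressions, so an access like `foo.bar[i]` flattens to `foo_bar_i` for use in the generated alias identifier. A self-contained sketch of the same recursion (the `flatten` and `flatName` names are illustrative, not the commit's API):

import std/macros

proc flatten(node: NimNode): string =
  # Walk a.b and a[b] nodes, joining the leaf identifiers with '_'
  if node.kind in {nnkDotExpr, nnkBracketExpr}:
    result = flatten(node[0]) & "_" & flatten(node[1])
  elif node.kind in {nnkIdent, nnkSym}:
    result = $node
  else:
    error "unsupported node: " & node.repr

macro flatName(e: untyped): string =
  ## Expand to the flattened name as a string literal
  newLit flatten(e)

echo flatName(foo.bar)     # foo_bar
echo flatName(foo.bar[i])  # foo_bar_i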

src/arraymancer/nn_primitives/nnp_softmax.nim (3 additions, 2 deletions)

@@ -29,10 +29,11 @@ proc softmax*[T](input: Tensor[T]): Tensor[T] {.noInit.} =
   let batch_size = input.shape[0]
   result = zeros_like(input)

+  # TODO: the performance of nested parallel regions is highly suspect
   for i in 0||(batch_size-1):
     let (max, sumexp) = input[i, _].streaming_max_sumexp

     var res_slice = result[i, _]

-    apply2_inline(res_slice, input[i, _]):
-      stable_softmax(y, max, sumexp)
+    forEachSerial r in res_slice, inpi in input[i, _]:
+      r = stable_softmax(inpi, max, sumexp)
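
`forEachSerial` comes from laser's strided iteration layer, the same module family as the `foreach_common.nim` change above. Where `apply2_inline` binds the destination and source elements to the fixed names `x` and `y`, `forEachSerial` lets the caller name them. A hedged usage sketch, assuming the macro is re-exported by the top-level arraymancer module (otherwise import arraymancer/laser/strided_iteration/foreach directly):

import arraymancer

let a = @[1.0, 2.0, 3.0].toTensor
var b = zeros_like(a)

# Single-threaded element-wise traversal, mirroring the commit's pattern
forEachSerial dst in b, src in a:
  dst = src * 2.0

echo b  # b now holds the element-wise doubles: 2.0, 4.0, 6.0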

src/arraymancer/nn_primitives/nnp_softmax_cross_entropy.nim (3 additions, 2 deletions)

@@ -185,13 +185,14 @@ proc sparse_softmax_cross_entropy_backward*[T; Idx: SomeNumber or byte or char o
   for i, truth_idx in enumerate(target):
     result[i, int(truth_idx)] = -1

+  # TODO: the performance of nested parallel regions is highly suspect
   for i in 0||(batch_size-1):
     let (max, sumexp) = cached_tensor[i, _].streaming_max_sumexp

     var res_slice = result[i, _]

-    apply2_inline(res_slice, cached_tensor[i, _]):
-      grad * (stable_softmax(y, max, sumexp) + x) / T(batch_size)
+    forEachSerial r in res_slice, ci in cached_tensor[i, _]:
+      r = grad * (stable_softmax(ci, max, sumexp) + r) / T(batch_size)

 # ################################################
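
A note on the `+ x` -> `+ r` rename, reconstructed from the surrounding code rather than the commit message: the loop above seeds `result[i, truth_idx]` with -1 and leaves every other entry at 0, so the destination element already holds the negative one-hot target when this pass runs. Each element of the result therefore ends up as

  grad * (softmax(cached)[i, j] - (if j == truth_idx: 1 else: 0)) / batch_size

which is the standard sparse softmax cross-entropy gradient. `apply2_inline` implicitly named the destination element `x`; `forEachSerial` binds it explicitly to `r`, hence the substitution in the body.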
