Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SR-9684] Differentiable derived conformances: Redefinition of init() when all stored properties are let #52126

Closed
rxwei opened this issue Jan 17, 2019 · 1 comment
Assignees
Labels
bug A deviation from expected or documented behavior. Also: expected but undesirable behavior. swift for tensorflow

Comments

@rxwei
Copy link
Member

rxwei commented Jan 17, 2019

Previous ID SR-9684
Radar None
Original Reporter @rxwei
Type Bug
Status Resolved
Resolution Done
Additional Detail from JIRA
Votes 0
Component/s Swift for TensorFlow
Labels Bug
Assignee @dan-zheng
Priority Medium

md5: da7652f48df46db6a41370a508f7ee45

Issue Description:

import TensorFlow
public protocol Layer: Differentiable & KeyPathIterable
    where AllDifferentiableVariables: KeyPathIterable {
    /// The input type of the layer.
    associatedtype Input: Differentiable
    /// The output type of the layer.
    associatedtype Output: Differentiable

    /// Returns the output obtained from applying to an input.
    @differentiable(wrt: (self, .0))
    func applied(to input: Input) -> Output
}

public extension Layer {
    func valueWithPullback(at input: Input)
        -> (output: Output,
            pullback: (Output.CotangentVector)
                -> (layerGradient: CotangentVector, inputGradient: Input.CotangentVector)) {
        let (out, pullback) = _valueWithPullback(at: self, input, in: Self.applied(to:))
        return (out, pullback)
    }
}

public struct Dense<Scalar>: VectorNumeric, Layer
    where Scalar : FloatingPoint & Differentiable & TensorFlowScalar {
    public var weight: Tensor<Scalar>
    public var bias: Tensor<Scalar>

    @differentiable(wrt: (self, .0), vjp: _vjpApplied(to:))
    public func applied(to input: Tensor<Scalar>) -> Tensor<Scalar> {
        return matmul(input, weight) + bias
    }

    @usableFromInline
    func _vjpApplied(to input: Tensor<Scalar>)
        -> (Tensor<Scalar>, (Tensor<Scalar>) -> (Dense, Tensor<Scalar>)) {
      let r0 = matmul(input, weight)
      let r1 = r0 + bias
      func pullback(_ v: Tensor<Scalar>) -> (Dense, Tensor<Scalar>) {
          return (Dense(weight: matmul(input.transposed(), v), bias: v),
                  matmul(v, weight.transposed()))
      }
      return (r1, pullback)
    }
}

public extension Dense where Scalar : BinaryFloatingPoint,
                             Scalar.RawSignificand : FixedWidthInteger {
    init(inputSize: Int, outputSize: Int) {
        weight = Tensor(randomNormal: [Int32(inputSize), Int32(outputSize)])
        bias = Tensor(randomNormal: [Int32(outputSize)])
    }
}

struct Classifier: Layer {
    private static var rng = ARC4RandomNumberGenerator(seed: 42)
    let l1 = Dense<Float>(inputSize: 2, outputSize: 4)
    let l2 = Dense<Float>(inputSize: 4, outputSize: 1)
    func applied(to input: Tensor<Float>) -> Tensor<Float> {
        let h1 = l1.applied(to: input)
        return l2.applied(to: h1)
    }
}
@rxwei
Copy link
Member Author

rxwei commented Jan 17, 2019

The synthesized AST contains two `internal init()` declarations (both visible in the dump below), which is the redefinition error: one comes from the struct's default initializer synthesis and a second from the `Differentiable` derived-conformance synthesis, because all stored properties are `let` with initial values.

internal struct Classifier : Layer {
  @_hasInitialValue private static var rng: ARC4RandomNumberGenerator
  @_hasInitialValue internal let l1: Dense<Float>
  @_hasInitialValue internal let l2: Dense<Float>
  internal func applied(to input: Tensor<Float>) -> Tensor<Float>
  internal init()
  internal typealias Input = Tensor<Float>
  internal typealias Output = Tensor<Float>
  @_fieldwiseProductSpace internal struct AllDifferentiableVariables : Differentiable, AdditiveArithmetic, KeyPathIterable, VectorNumeric {
    internal init(l1: Dense<Float>, l2: Dense<Float>)
    @_fieldwiseProductSpace internal typealias AllDifferentiableVariables = Classifier.AllDifferentiableVariables
    @_fieldwiseProductSpace internal typealias TangentVector = Classifier.AllDifferentiableVariables
    @_fieldwiseProductSpace internal typealias CotangentVector = Classifier.AllDifferentiableVariables
    internal func tangentVector(from cotangent: Classifier.AllDifferentiableVariables) -> Classifier.AllDifferentiableVariables
    internal typealias AllKeyPaths = [PartialKeyPath<Classifier.AllDifferentiableVariables>]
    internal var allKeyPaths: [PartialKeyPath<Classifier.AllDifferentiableVariables>] { get }
    internal static var zero: Classifier.AllDifferentiableVariables { get }
    internal static func + (lhs: Classifier.AllDifferentiableVariables, rhs: Classifier.AllDifferentiableVariables) -> Classifier.AllDifferentiableVariables
    internal static func - (lhs: Classifier.AllDifferentiableVariables, rhs: Classifier.AllDifferentiableVariables) -> Classifier.AllDifferentiableVariables
    @_implements(Equatable, ==(_:_:)) internal static func __derived_struct_equals(_ a: Classifier.AllDifferentiableVariables, _ b: Classifier.AllDifferentiableVariables) -> Bool
    internal typealias Scalar = Float
    internal static func * (lhs: Float, rhs: Classifier.AllDifferentiableVariables) -> Classifier.AllDifferentiableVariables
  }
  @_fieldwiseProductSpace internal typealias TangentVector = Classifier.AllDifferentiableVariables
  @_fieldwiseProductSpace internal typealias CotangentVector = Classifier.AllDifferentiableVariables
  internal var allDifferentiableVariables: Classifier.AllDifferentiableVariables { get set }
  internal func moved(along direction: Classifier.AllDifferentiableVariables) -> Classifier
  internal init()
  internal func tangentVector(from cotangent: Classifier.AllDifferentiableVariables) -> Classifier.AllDifferentiableVariables
  internal typealias AllKeyPaths = [PartialKeyPath<Classifier>]
  internal var allKeyPaths: [PartialKeyPath<Classifier>] { get }
}

@swift-ci swift-ci transferred this issue from apple/swift-issues Apr 25, 2022
This issue was closed.
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
bug A deviation from expected or documented behavior. Also: expected but undesirable behavior. swift for tensorflow
Projects
None yet
Development

No branches or pull requests

1 participant