MLCTensor.Create Method

Definition

Namespace: MLCompute

Creates a new MLCTensor.

Overloads

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, Single, MLCDataType) - Creates a tensor with the given width, height, feature channel count, and batch size, filled with a scalar value of the specified data type.
Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCTensorData, MLCDataType) - Creates a tensor with the given width, height, feature channel count, and batch size, initialized from the supplied tensor data with the specified data type.
Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCTensorData) - Creates a tensor with the given width, height, feature channel count, and batch size, initialized from the supplied tensor data.
Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCRandomInitializerType) - Creates a tensor with the given width, height, feature channel count, and batch size, initialized with the specified random initializer.
Create(IntPtr[], Boolean, UIntPtr, UIntPtr, MLCTensorData) - Creates a sequence tensor from per-sequence lengths, a sorted-sequences flag, a feature channel count, a batch size, and optional tensor data.
Create(IntPtr[], Boolean, UIntPtr, UIntPtr, MLCRandomInitializerType) - Creates a sequence tensor from per-sequence lengths, a sorted-sequences flag, a feature channel count, and a batch size, initialized with the specified random initializer.
Create(UIntPtr, UIntPtr, UIntPtr, MLCTensorData) - Creates a sequence tensor with the given sequence length, feature channel count, and batch size, initialized from optional tensor data.
Create(UIntPtr, UIntPtr, UIntPtr, MLCRandomInitializerType) - Creates a sequence tensor with the given sequence length, feature channel count, and batch size, initialized with the specified random initializer.
Create(UIntPtr, UIntPtr, UIntPtr) - Creates a sequence tensor with the given sequence length, feature channel count, and batch size.
Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr) - Creates a tensor with the given width, height, feature channel count, and batch size.
Create(IntPtr[], MLCRandomInitializerType, MLCDataType) - Creates a tensor with the given shape and data type, initialized with the specified random initializer.
Create(IntPtr[], NSNumber, MLCDataType) - Creates a tensor with the given shape and data type, filled with a scalar value.
Create(IntPtr[], MLCRandomInitializerType) - Creates a tensor with the given shape, initialized with the specified random initializer.
Create(IntPtr[], MLCTensorData, MLCDataType) - Creates a tensor with the given shape and data type, initialized from the supplied tensor data.
Create(IntPtr[], MLCDataType) - Creates a tensor with the given shape and data type.
Create(MLCTensorDescriptor, MLCTensorData) - Creates a tensor from a tensor descriptor, initialized from the supplied tensor data.
Create(MLCTensorDescriptor, MLCRandomInitializerType) - Creates a tensor from a tensor descriptor, initialized with the specified random initializer.
Create(MLCTensorDescriptor, NSNumber) - Creates a tensor from a tensor descriptor, filled with a scalar value.
Create(IntPtr[]) - Creates a tensor with the given shape.
Create(MLCTensorDescriptor) - Creates a tensor from a tensor descriptor.

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, Single, MLCDataType)

[Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:fillWithData:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr width, UIntPtr height, UIntPtr featureChannelCount, UIntPtr batchSize, float fillWithData, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:fillWithData:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * unativeint * single * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

width
UIntPtr

unativeint

height
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

fillWithData
Single
dataType
MLCDataType

Returns

Attributes
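
Examples

A minimal sketch of calling this overload; the dimensions and fill value below are illustrative, not required values.

using System;
using MLCompute;

// An 8 x 8 tensor with 32 feature channels and batch size 1, every element set to 0.5f.
var tensor = MLCTensor.Create(
    width: (UIntPtr)8,
    height: (UIntPtr)8,
    featureChannelCount: (UIntPtr)32,
    batchSize: (UIntPtr)1,
    fillWithData: 0.5f,
    dataType: MLCDataType.Float32);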

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCTensorData, MLCDataType)

[Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:data:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr width, UIntPtr height, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCTensorData data, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:data:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * unativeint * MLCompute.MLCTensorData * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

width
UIntPtr

unativeint

height
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

data
MLCTensorData
dataType
MLCDataType

Returns

Attributes
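
Examples

A sketch that assumes you already hold an MLCTensorData instance wrapping your input buffer (creating that data is outside this overload); the helper name MakeImageTensor and the dimensions are placeholders.

using System;
using MLCompute;

static MLCTensor MakeImageTensor(MLCTensorData imageData)
{
    // A 224 x 224 RGB batch of 16 images backed by imageData, interpreted as Float32.
    return MLCTensor.Create(
        width: (UIntPtr)224,
        height: (UIntPtr)224,
        featureChannelCount: (UIntPtr)3,
        batchSize: (UIntPtr)16,
        data: imageData,
        dataType: MLCDataType.Float32);
}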

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCTensorData)

[Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:data:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr width, UIntPtr height, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCTensorData data);
[<Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:data:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * unativeint * MLCompute.MLCTensorData -> MLCompute.MLCTensor

Parameters

width
UIntPtr

unativeint

height
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

data
MLCTensorData

Returns

Attributes
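
Examples

A sketch that assumes an existing MLCTensorData instance; the helper name and dimensions are placeholders.

using System;
using MLCompute;

static MLCTensor MakeTensor(MLCTensorData data)
{
    // 32 x 32, 8 feature channels, batch of 4, initialized from the supplied data.
    return MLCTensor.Create(
        width: (UIntPtr)32,
        height: (UIntPtr)32,
        featureChannelCount: (UIntPtr)8,
        batchSize: (UIntPtr)4,
        data: data);
}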

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr, MLCRandomInitializerType)

[Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:randomInitializerType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr width, UIntPtr height, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCRandomInitializerType randomInitializerType);
[<Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:randomInitializerType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * unativeint * MLCompute.MLCRandomInitializerType -> MLCompute.MLCTensor

Parameters

width
UIntPtr

unativeint

height
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

randomInitializerType
MLCRandomInitializerType

Returns

Attributes
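
Examples

A sketch using random initialization; the dimensions are illustrative, and MLCRandomInitializerType.GlorotUniform is assumed to be the initializer you want.

using System;
using MLCompute;

// A randomly initialized 3 x 3 tensor with 64 feature channels and batch size 1.
var tensor = MLCTensor.Create(
    width: (UIntPtr)3,
    height: (UIntPtr)3,
    featureChannelCount: (UIntPtr)64,
    batchSize: (UIntPtr)1,
    randomInitializerType: MLCRandomInitializerType.GlorotUniform);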

Applies to

Create(IntPtr[], Boolean, UIntPtr, UIntPtr, MLCTensorData)

[Foundation.Export("tensorWithSequenceLengths:sortedSequences:featureChannelCount:batchSize:data:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor? Create(IntPtr[] sequenceLengths, bool sortedSequences, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCTensorData? data);
[<Foundation.Export("tensorWithSequenceLengths:sortedSequences:featureChannelCount:batchSize:data:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * bool * unativeint * unativeint * MLCompute.MLCTensorData -> MLCompute.MLCTensor

Parameters

sequenceLengths

IntPtr[]

nativeint[]

sortedSequences
Boolean
featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

data
MLCTensorData

Returns

Attributes
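
Examples

A sketch for a batch of three variable-length sequences; the lengths and channel count are illustrative, and data is passed as null here because this overload accepts optional tensor data.

using System;
using MLCompute;

// Three sequences of lengths 5, 4, and 2, declared as sorted, with 64 feature channels.
var tensor = MLCTensor.Create(
    sequenceLengths: new IntPtr[] { (IntPtr)5, (IntPtr)4, (IntPtr)2 },
    sortedSequences: true,
    featureChannelCount: (UIntPtr)64,
    batchSize: (UIntPtr)3,
    data: null);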

Applies to

Create(IntPtr[], Boolean, UIntPtr, UIntPtr, MLCRandomInitializerType)

[Foundation.Export("tensorWithSequenceLengths:sortedSequences:featureChannelCount:batchSize:randomInitializerType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor? Create(IntPtr[] sequenceLengths, bool sortedSequences, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCRandomInitializerType randomInitializerType);
[<Foundation.Export("tensorWithSequenceLengths:sortedSequences:featureChannelCount:batchSize:randomInitializerType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * bool * unativeint * unativeint * MLCompute.MLCRandomInitializerType -> MLCompute.MLCTensor

Parameters

sequenceLengths

IntPtr[]

nativeint[]

sortedSequences
Boolean
featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

randomInitializerType
MLCRandomInitializerType

Returns

Attributes
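
Examples

A sketch for the same variable-length-sequence layout, but randomly initialized; the lengths and the GlorotUniform initializer are illustrative choices.

using System;
using MLCompute;

var tensor = MLCTensor.Create(
    sequenceLengths: new IntPtr[] { (IntPtr)5, (IntPtr)4, (IntPtr)2 },
    sortedSequences: true,
    featureChannelCount: (UIntPtr)64,
    batchSize: (UIntPtr)3,
    randomInitializerType: MLCRandomInitializerType.GlorotUniform);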

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, MLCTensorData)

[Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:data:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr sequenceLength, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCTensorData? data);
[<Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:data:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * MLCompute.MLCTensorData -> MLCompute.MLCTensor

Parameters

sequenceLength
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

data
MLCTensorData

Returns

Attributes
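
Examples

A sketch for fixed-length sequences; the sizes are illustrative, and null is passed because the data argument is optional in this overload.

using System;
using MLCompute;

// 10 time steps, 128 feature channels, batch of 32, with no backing data supplied.
var tensor = MLCTensor.Create(
    sequenceLength: (UIntPtr)10,
    featureChannelCount: (UIntPtr)128,
    batchSize: (UIntPtr)32,
    data: null);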

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, MLCRandomInitializerType)

[Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:randomInitializerType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr sequenceLength, UIntPtr featureChannelCount, UIntPtr batchSize, MLCompute.MLCRandomInitializerType randomInitializerType);
[<Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:randomInitializerType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * MLCompute.MLCRandomInitializerType -> MLCompute.MLCTensor

Parameters

sequenceLength
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

randomInitializerType
MLCRandomInitializerType

Returns

Attributes
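
Examples

A sketch of a randomly initialized sequence tensor; the sizes and the choice of GlorotUniform are illustrative.

using System;
using MLCompute;

var tensor = MLCTensor.Create(
    sequenceLength: (UIntPtr)10,
    featureChannelCount: (UIntPtr)128,
    batchSize: (UIntPtr)32,
    randomInitializerType: MLCRandomInitializerType.GlorotUniform);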

Applies to

Create(UIntPtr, UIntPtr, UIntPtr)

[Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr sequenceLength, UIntPtr featureChannelCount, UIntPtr batchSize);
[<Foundation.Export("tensorWithSequenceLength:featureChannelCount:batchSize:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint -> MLCompute.MLCTensor

Parameters

sequenceLength
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

Returns

Attributes
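
Examples

A sketch that specifies only the sequence length, feature channel count, and batch size; the values are illustrative.

using System;
using MLCompute;

// 10 time steps, 128 feature channels, batch of 32.
var tensor = MLCTensor.Create(
    sequenceLength: (UIntPtr)10,
    featureChannelCount: (UIntPtr)128,
    batchSize: (UIntPtr)32);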

Applies to

Create(UIntPtr, UIntPtr, UIntPtr, UIntPtr)

[Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(UIntPtr width, UIntPtr height, UIntPtr featureChannelCount, UIntPtr batchSize);
[<Foundation.Export("tensorWithWidth:height:featureChannelCount:batchSize:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : unativeint * unativeint * unativeint * unativeint -> MLCompute.MLCTensor

Parameters

width
UIntPtr

unativeint

height
UIntPtr

unativeint

featureChannelCount
UIntPtr

unativeint

batchSize
UIntPtr

unativeint

Returns

Attributes
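
Examples

A sketch that specifies only the tensor dimensions; the values are illustrative.

using System;
using MLCompute;

// 28 x 28, 1 feature channel, batch of 64.
var tensor = MLCTensor.Create(
    width: (UIntPtr)28,
    height: (UIntPtr)28,
    featureChannelCount: (UIntPtr)1,
    batchSize: (UIntPtr)64);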

Applies to

Create(IntPtr[], MLCRandomInitializerType, MLCDataType)

[Foundation.Export("tensorWithShape:randomInitializerType:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape, MLCompute.MLCRandomInitializerType randomInitializerType, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithShape:randomInitializerType:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * MLCompute.MLCRandomInitializerType * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

randomInitializerType
MLCRandomInitializerType
dataType
MLCDataType

Returns

Attributes
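
Examples

A sketch using a shape array; the shape, the GlorotUniform initializer, and Float32 are illustrative choices.

using System;
using MLCompute;

// Shape { 1, 3, 224, 224 }, randomly initialized Float32 values.
var tensor = MLCTensor.Create(
    shape: new IntPtr[] { (IntPtr)1, (IntPtr)3, (IntPtr)224, (IntPtr)224 },
    randomInitializerType: MLCRandomInitializerType.GlorotUniform,
    dataType: MLCDataType.Float32);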

Applies to

Create(IntPtr[], NSNumber, MLCDataType)

[Foundation.Export("tensorWithShape:fillWithData:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape, Foundation.NSNumber fillData, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithShape:fillWithData:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * Foundation.NSNumber * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

fillData
NSNumber
dataType
MLCDataType

Returns

Attributes
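
Examples

A sketch that fills every element with a scalar wrapped in an NSNumber; the shape and fill value are illustrative.

using System;
using Foundation;
using MLCompute;

// A 2 x 128 tensor with every element set to 1.0f.
var tensor = MLCTensor.Create(
    shape: new IntPtr[] { (IntPtr)2, (IntPtr)128 },
    fillData: NSNumber.FromFloat(1.0f),
    dataType: MLCDataType.Float32);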

Applies to

Create(IntPtr[], MLCRandomInitializerType)

[Foundation.Export("tensorWithShape:randomInitializerType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape, MLCompute.MLCRandomInitializerType randomInitializerType);
[<Foundation.Export("tensorWithShape:randomInitializerType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * MLCompute.MLCRandomInitializerType -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

randomInitializerType
MLCRandomInitializerType

Returns

Attributes
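
Examples

A sketch of random initialization from a shape alone; the shape and the GlorotUniform initializer are illustrative.

using System;
using MLCompute;

var weights = MLCTensor.Create(
    shape: new IntPtr[] { (IntPtr)128, (IntPtr)64 },
    randomInitializerType: MLCRandomInitializerType.GlorotUniform);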

Applies to

Create(IntPtr[], MLCTensorData, MLCDataType)

[Foundation.Export("tensorWithShape:data:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape, MLCompute.MLCTensorData data, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithShape:data:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * MLCompute.MLCTensorData * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

data
MLCTensorData
dataType
MLCDataType

Returns

Attributes
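
Examples

A sketch that assumes an existing MLCTensorData instance wrapping your buffer; the helper name and shape are placeholders.

using System;
using MLCompute;

static MLCTensor MakeTensor(MLCTensorData data)
{
    // A batch of 16 vectors of length 512, backed by the supplied data.
    return MLCTensor.Create(
        shape: new IntPtr[] { (IntPtr)16, (IntPtr)512 },
        data: data,
        dataType: MLCDataType.Float32);
}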

Applies to

Create(IntPtr[], MLCDataType)

[Foundation.Export("tensorWithShape:dataType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape, MLCompute.MLCDataType dataType);
[<Foundation.Export("tensorWithShape:dataType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] * MLCompute.MLCDataType -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

dataType
MLCDataType

Returns

Attributes
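
Examples

A sketch that declares only the shape and element type; the values are illustrative.

using System;
using MLCompute;

var tensor = MLCTensor.Create(
    shape: new IntPtr[] { (IntPtr)1, (IntPtr)3, (IntPtr)224, (IntPtr)224 },
    dataType: MLCDataType.Float32);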

Applies to

Create(MLCTensorDescriptor, MLCTensorData)

[Foundation.Export("tensorWithDescriptor:data:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(MLCompute.MLCTensorDescriptor tensorDescriptor, MLCompute.MLCTensorData data);
[<Foundation.Export("tensorWithDescriptor:data:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : MLCompute.MLCTensorDescriptor * MLCompute.MLCTensorData -> MLCompute.MLCTensor

Parameters

tensorDescriptor
MLCTensorDescriptor
data
MLCTensorData

Returns

Attributes
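
Examples

A sketch that assumes you already have an MLCTensorDescriptor and an MLCTensorData created elsewhere; the helper name is a placeholder.

using MLCompute;

static MLCTensor MakeTensor(MLCTensorDescriptor descriptor, MLCTensorData data)
{
    // The descriptor supplies the layout; the data supplies the initial contents.
    return MLCTensor.Create(descriptor, data);
}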

Applies to

Create(MLCTensorDescriptor, MLCRandomInitializerType)

[Foundation.Export("tensorWithDescriptor:randomInitializerType:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(MLCompute.MLCTensorDescriptor tensorDescriptor, MLCompute.MLCRandomInitializerType randomInitializerType);
[<Foundation.Export("tensorWithDescriptor:randomInitializerType:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : MLCompute.MLCTensorDescriptor * MLCompute.MLCRandomInitializerType -> MLCompute.MLCTensor

Parameters

tensorDescriptor
MLCTensorDescriptor
randomInitializerType
MLCRandomInitializerType

Returns

Attributes
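
Examples

A sketch that assumes an existing MLCTensorDescriptor; GlorotUniform is an illustrative initializer choice.

using MLCompute;

static MLCTensor MakeRandomTensor(MLCTensorDescriptor descriptor)
{
    return MLCTensor.Create(descriptor, MLCRandomInitializerType.GlorotUniform);
}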

Applies to

Create(MLCTensorDescriptor, NSNumber)

[Foundation.Export("tensorWithDescriptor:fillWithData:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(MLCompute.MLCTensorDescriptor tensorDescriptor, Foundation.NSNumber fillData);
[<Foundation.Export("tensorWithDescriptor:fillWithData:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : MLCompute.MLCTensorDescriptor * Foundation.NSNumber -> MLCompute.MLCTensor

Parameters

tensorDescriptor
MLCTensorDescriptor
fillData
NSNumber

Returns

Attributes
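
Examples

A sketch that assumes an existing MLCTensorDescriptor and fills the tensor with a scalar; the fill value is illustrative.

using Foundation;
using MLCompute;

static MLCTensor MakeZeroTensor(MLCTensorDescriptor descriptor)
{
    // Every element is set to 0.0f.
    return MLCTensor.Create(descriptor, NSNumber.FromFloat(0.0f));
}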

Applies to

Create(IntPtr[])

[Foundation.Export("tensorWithShape:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(IntPtr[] shape);
[<Foundation.Export("tensorWithShape:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : nativeint[] -> MLCompute.MLCTensor

Parameters

shape

IntPtr[]

nativeint[]

Returns

Attributes
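
Examples

A sketch using only a shape array; the shape is illustrative.

using System;
using MLCompute;

var tensor = MLCTensor.Create(new IntPtr[] { (IntPtr)8, (IntPtr)16, (IntPtr)16 });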

Applies to

Create(MLCTensorDescriptor)

[Foundation.Export("tensorWithDescriptor:")]
[ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)]
public static MLCompute.MLCTensor Create(MLCompute.MLCTensorDescriptor tensorDescriptor);
[<Foundation.Export("tensorWithDescriptor:")>]
[<ObjCRuntime.BindingImpl(ObjCRuntime.BindingImplOptions.GeneratedCode | ObjCRuntime.BindingImplOptions.Optimizable)>]
static member Create : MLCompute.MLCTensorDescriptor -> MLCompute.MLCTensor

Parameters

tensorDescriptor
MLCTensorDescriptor

Returns

Attributes
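
Examples

A sketch that assumes an existing MLCTensorDescriptor created elsewhere; the helper name is a placeholder.

using MLCompute;

static MLCTensor MakeTensor(MLCTensorDescriptor descriptor)
{
    return MLCTensor.Create(descriptor);
}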

Applies to