-
Methods in org.robovm.apple.avfoundation with parameters of type CMTime
| Modifier and Type | Method | Description |
| --- | --- | --- |
NSObject |
AVPlayer.addPeriodicTimeObserver(CMTime interval,
DispatchQueue queue,
VoidBlock1<CMTime> block) |
|
NSObject |
AVSampleBufferRenderSynchronizer.addPeriodicTimeObserver(CMTime interval,
DispatchQueue queue,
VoidBlock1<CMTime> block) |
|
boolean |
AVAssetWriterInputPixelBufferAdaptor.appendPixelBuffer(CVPixelBuffer pixelBuffer,
CMTime presentationTime) |
|
boolean |
AVMutableMovieTrack.appendSampleBuffer(CMSampleBuffer sampleBuffer,
CMTime outDecodeTime,
CMTime outPresentationTime,
NSError.NSErrorPtr outError) |
|
void |
AVPlayerInterstitialEventController.cancelCurrentEvent(CMTime resumptionOffset) |
|
void |
AVDelegatingPlaybackCoordinator.coordinateSeekToTime(CMTime time,
AVDelegatingPlaybackCoordinatorSeekOptions options) |
|
protected static long |
AVCaptureManualExposureBracketedStillImageSettings.create(CMTime duration,
float ISO) |
|
protected static long |
AVPlayerInterstitialEvent.create(AVPlayerItem primaryItem,
String identifier,
CMTime time,
NSArray<AVPlayerItem> templateItems,
AVPlayerInterstitialEventRestrictions restrictions,
CMTime resumptionOffset,
CMTime playoutLimit,
NSDictionary<?,?> userDefinedAttributes) |
|
protected static long |
AVPlayerInterstitialEvent.create(AVPlayerItem primaryItem,
String identifier,
NSDate date,
NSArray<AVPlayerItem> templateItems,
AVPlayerInterstitialEventRestrictions restrictions,
CMTime resumptionOffset,
CMTime playoutLimit,
NSDictionary<?,?> userDefinedAttributes) |
|
static NSValue |
NSValueExtensions.create(CMTime time) |
|
protected static NSValue |
NSValueExtensions.create(ObjCClass clazz,
CMTime time) |
|
void |
AVCaptureDepthDataOutputDelegate.didDropDepthData(AVCaptureDepthDataOutput output,
AVDepthData depthData,
CMTime timestamp,
AVCaptureConnection connection,
AVCaptureOutputDataDroppedReason reason) |
|
void |
AVCaptureDepthDataOutputDelegateAdapter.didDropDepthData(AVCaptureDepthDataOutput output,
AVDepthData depthData,
CMTime timestamp,
AVCaptureConnection connection,
AVCaptureOutputDataDroppedReason reason) |
|
void |
AVCapturePhotoCaptureDelegate.didFinishProcessingLivePhotoToMovieFileAtURL(AVCapturePhotoOutput output,
NSURL outputFileURL,
CMTime duration,
CMTime photoDisplayTime,
AVCaptureResolvedPhotoSettings resolvedSettings,
NSError error) |
|
void |
AVCapturePhotoCaptureDelegateAdapter.didFinishProcessingLivePhotoToMovieFileAtURL(AVCapturePhotoOutput output,
NSURL outputFileURL,
CMTime duration,
CMTime photoDisplayTime,
AVCaptureResolvedPhotoSettings resolvedSettings,
NSError error) |
|
void |
AVPlayerItemLegibleOutputPushDelegate.didOutputAttributedStrings(AVPlayerItemLegibleOutput output,
NSArray<NSAttributedString> strings,
List<CMSampleBuffer> nativeSamples,
CMTime itemTime) |
|
void |
AVPlayerItemLegibleOutputPushDelegateAdapter.didOutputAttributedStrings(AVPlayerItemLegibleOutput output,
NSArray<NSAttributedString> strings,
List<CMSampleBuffer> nativeSamples,
CMTime itemTime) |
|
void |
AVCaptureDepthDataOutputDelegate.didOutputDepthData(AVCaptureDepthDataOutput output,
AVDepthData depthData,
CMTime timestamp,
AVCaptureConnection connection) |
|
void |
AVCaptureDepthDataOutputDelegateAdapter.didOutputDepthData(AVCaptureDepthDataOutput output,
AVDepthData depthData,
CMTime timestamp,
AVCaptureConnection connection) |
|
static void |
NSCoderExtensions.encodeCMTime(NSCoder thiz,
CMTime time,
String key) |
|
void |
AVCoordinatedPlaybackSuspension.endProposingNewTime(CMTime time) |
|
void |
AVAssetWriter.endSession(CMTime endTime) |
|
CMTime |
AVPlaybackCoordinator.expectedItemTimeAtHostTime(CMTime hostClockTime) |
|
void |
AVSampleBufferAudioRenderer.flushFromSourceTime(CMTime time,
VoidBooleanBlock completionHandler) |
|
CGImage |
AVAssetImageGenerator.getCGImageAtTime(CMTime requestedTime,
CMTime actualTime) |
|
AVTimeRamp<CGRect> |
AVVideoCompositionLayerInstruction.getCropRectangleRamp(CMTime time) |
|
protected boolean |
AVVideoCompositionLayerInstruction.getCropRectangleRamp(CMTime time,
CGRect.CGRectPtr startCropRectangle,
CGRect.CGRectPtr endCropRectangle,
CMTimeRange.CMTimeRangePtr timeRange) |
|
AVTimeRamp<Float> |
AVVideoCompositionLayerInstruction.getOpacityRamp(CMTime time) |
|
protected boolean |
AVVideoCompositionLayerInstruction.getOpacityRamp(CMTime time,
FloatPtr startOpacity,
FloatPtr endOpacity,
CMTimeRange.CMTimeRangePtr timeRange) |
|
CVPixelBuffer |
AVPlayerItemVideoOutput.getPixelBufferForItemTime(CMTime itemTime,
CMTime outItemTimeForDisplay) |
|
CMTime |
AVAssetTrack.getSamplePresentationTime(CMTime trackTime) |
|
AVAssetTrackSegment |
AVAssetTrack.getSegment(CMTime trackTime) |
|
AVCompositionTrackSegment |
AVCompositionTrack.getSegment(CMTime trackTime) |
|
AVTimeRamp<CGAffineTransform> |
AVVideoCompositionLayerInstruction.getTransformRamp(CMTime time) |
|
protected boolean |
AVVideoCompositionLayerInstruction.getTransformRamp(CMTime time,
CGAffineTransform.CGAffineTransformPtr startTransform,
CGAffineTransform.CGAffineTransformPtr endTransform,
CMTimeRange.CMTimeRangePtr timeRange) |
|
AVTimeRamp<Float> |
AVAudioMixInputParameters.getVolumeRamp(CMTime time) |
|
protected boolean |
AVAudioMixInputParameters.getVolumeRamp(CMTime time,
FloatPtr startVolume,
FloatPtr endVolume,
CMTimeRange.CMTimeRangePtr timeRange) |
|
boolean |
AVPlayerItemVideoOutput.hasNewPixelBufferForItemTime(CMTime itemTime) |
|
boolean |
AVMutableComposition.insertTimeRange(CMTimeRange timeRange,
AVAsset asset,
CMTime startTime) |
|
boolean |
AVMutableCompositionTrack.insertTimeRange(CMTimeRange timeRange,
AVAssetTrack track,
CMTime startTime) |
|
boolean |
AVMutableMovie.insertTimeRange(CMTimeRange timeRange,
AVAsset asset,
CMTime startTime,
boolean copySampleData,
NSError.NSErrorPtr outError) |
|
boolean |
AVMutableMovieTrack.insertTimeRange(CMTimeRange timeRange,
AVAssetTrack track,
CMTime startTime,
boolean copySampleData,
NSError.NSErrorPtr outError) |
|
boolean |
AVMutableCompositionTrack.insertTimeRanges(List<CMTimeRange> timeRanges,
NSArray<AVAssetTrack> tracks,
CMTime startTime) |
|
void |
AVAssetTrack.loadSamplePresentationTimeForTrackTime(CMTime trackTime,
VoidBlock2<CMTime,NSError> completionHandler) |
|
void |
AVAssetTrack.loadSegmentForTrackTime(CMTime trackTime,
VoidBlock2<AVAssetTrackSegment,NSError> completionHandler) |
|
void |
AVSampleBufferRenderSynchronizer.removeRenderer(AVQueuedSampleBufferRendering renderer,
CMTime time,
VoidBooleanBlock completionHandler) |
|
void |
AVMutableComposition.scaleTimeRange(CMTimeRange timeRange,
CMTime duration) |
|
void |
AVMutableCompositionTrack.scaleTimeRange(CMTimeRange timeRange,
CMTime duration) |
|
void |
AVMutableMovie.scaleTimeRange(CMTimeRange timeRange,
CMTime duration) |
|
void |
AVMutableMovieTrack.scaleTimeRange(CMTimeRange timeRange,
CMTime duration) |
|
void |
AVPlayer.seekToTime(CMTime time) |
|
void |
AVPlayer.seekToTime(CMTime time,
CMTime toleranceBefore,
CMTime toleranceAfter) |
|
void |
AVPlayer.seekToTime(CMTime time,
CMTime toleranceBefore,
CMTime toleranceAfter,
VoidBooleanBlock completionHandler) |
|
void |
AVPlayer.seekToTime(CMTime time,
VoidBooleanBlock completionHandler) |
|
void |
AVPlayerItem.seekToTime(CMTime time) |
Deprecated.
|
void |
AVPlayerItem.seekToTime(CMTime time,
CMTime toleranceBefore,
CMTime toleranceAfter) |
Deprecated.
|
void |
AVPlayerItem.seekToTime(CMTime time,
CMTime toleranceBefore,
CMTime toleranceAfter,
VoidBooleanBlock completionHandler) |
|
void |
AVPlayerItem.seekToTime(CMTime time,
VoidBooleanBlock completionHandler) |
|
void |
AVCaptureDevice.setActiveDepthDataMinFrameDuration(CMTime v) |
|
void |
AVCaptureDevice.setActiveMaxExposureDuration(CMTime v) |
|
void |
AVCaptureDevice.setActiveVideoMaxFrameDuration(CMTime v) |
|
void |
AVCaptureDevice.setActiveVideoMinFrameDuration(CMTime v) |
|
void |
AVPlayerItem.setConfiguredTimeOffsetFromLive(CMTime v) |
|
void |
AVMutableVideoCompositionLayerInstruction.setCropRectangle(CGRect cropRectangle,
CMTime time) |
|
void |
AVMutableMetadataItem.setDuration(CMTime v) |
|
void |
AVCaptureDevice.setExposureModeCustom(CMTime duration,
float ISO,
VoidBlock1<CMTime> handler) |
|
void |
AVPlayerItem.setForwardPlaybackEndTime(CMTime v) |
|
void |
AVMutableVideoComposition.setFrameDuration(CMTime v) |
|
void |
AVAssetWriter.setInitialSegmentStartTime(CMTime v) |
|
void |
AVMutableMovie.setInterleavingPeriod(CMTime v) |
|
void |
AVCaptureFileOutput.setMaxRecordedDuration(CMTime v) |
|
void |
AVAssetWriter.setMovieFragmentInterval(CMTime v) |
|
void |
AVCaptureMovieFileOutput.setMovieFragmentInterval(CMTime v) |
|
void |
AVMutableVideoCompositionLayerInstruction.setOpacity(float opacity,
CMTime time) |
|
void |
AVAssetWriter.setOverallDurationHint(CMTime v) |
|
void |
AVAssetWriterInput.setPreferredMediaChunkDuration(CMTime v) |
|
void |
AVMutableMovieTrack.setPreferredMediaChunkDuration(CMTime v) |
|
void |
AVAssetWriter.setPreferredOutputSegmentInterval(CMTime v) |
|
void |
AVPlayer.setRate(float rate,
CMTime itemTime,
CMTime hostClockTime) |
|
void |
AVSampleBufferRenderSynchronizer.setRate(float rate,
CMTime time) |
|
void |
AVSampleBufferRenderSynchronizer.setRate(float rate,
CMTime time,
CMTime hostTime) |
|
void |
AVAssetImageGenerator.setRequestedTimeToleranceAfter(CMTime v) |
|
void |
AVAssetImageGenerator.setRequestedTimeToleranceBefore(CMTime v) |
|
void |
AVPlayerItem.setReversePlaybackEndTime(CMTime v) |
|
void |
AVOutputSettingsAssistant.setSourceVideoAverageFrameDuration(CMTime v) |
|
void |
AVOutputSettingsAssistant.setSourceVideoMinFrameDuration(CMTime v) |
|
void |
AVMutableMetadataItem.setTime(CMTime v) |
|
void |
AVMutableVideoCompositionLayerInstruction.setTransform(CGAffineTransform transform,
CMTime time) |
|
void |
AVCaptureConnection.setVideoMaxFrameDuration(CMTime v) |
Deprecated.
|
void |
AVCaptureConnection.setVideoMinFrameDuration(CMTime v) |
Deprecated.
|
void |
AVCaptureDeviceInput.setVideoMinFrameDurationOverride(CMTime v) |
|
void |
AVMutableAudioMixInputParameters.setVolume(float volume,
CMTime time) |
|
void |
AVAssetWriter.startSession(CMTime startTime) |
|
Method parameters in org.robovm.apple.avfoundation with type arguments of type CMTime
| Modifier and Type | Method | Description |
| --- | --- | --- |
NSObject |
AVPlayer.addBoundaryTimeObserver(List<CMTime> times,
DispatchQueue queue,
Runnable block) |
|
NSObject |
AVPlayer.addPeriodicTimeObserver(CMTime interval,
DispatchQueue queue,
VoidBlock1<CMTime> block) |
|
NSObject |
AVSampleBufferRenderSynchronizer.addPeriodicTimeObserver(CMTime interval,
DispatchQueue queue,
VoidBlock1<CMTime> block) |
|
void |
AVAssetExportSession.estimateMaximumDuration(VoidBlock2<CMTime,NSError> handler) |
|
void |
AVAssetImageGenerator.generateCGImagesAsynchronously(List<CMTime> requestedTimes,
VoidBlock5<CMTime,CGImage,CMTime,AVAssetImageGeneratorResult,NSError> handler) |
|
void |
AVAssetImageGenerator.generateCGImagesAsynchronously(List<CMTime> requestedTimes,
VoidBlock5<CMTime,CGImage,CMTime,AVAssetImageGeneratorResult,NSError> handler) |
|
void |
AVAssetImageGenerator.generateCGImagesAsynchronously(List<CMTime> requestedTimes,
VoidBlock5<CMTime,CGImage,CMTime,AVAssetImageGeneratorResult,NSError> handler) |
|
void |
AVAssetTrack.loadSamplePresentationTimeForTrackTime(CMTime trackTime,
VoidBlock2<CMTime,NSError> completionHandler) |
|
void |
AVCaptureDevice.setExposureModeCustom(CMTime duration,
float ISO,
VoidBlock1<CMTime> handler) |
|
void |
AVCaptureDevice.setExposureTargetBias(float bias,
VoidBlock1<CMTime> handler) |
|
void |
AVCaptureDevice.setFocusModeLocked(float lensPosition,
VoidBlock1<CMTime> handler) |
|
void |
AVCaptureDevice.setWhiteBalanceModeLocked(AVCaptureWhiteBalanceGains whiteBalanceGains,
VoidBlock1<CMTime> handler) |
|
Constructors in org.robovm.apple.avfoundation with parameters of type CMTime
| Constructor | Description |
| --- | --- |
AVCaptureManualExposureBracketedStillImageSettings(CMTime duration,
float ISO) |
|
AVPlayerInterstitialEvent(AVPlayerItem primaryItem,
String identifier,
CMTime time,
NSArray<AVPlayerItem> templateItems,
AVPlayerInterstitialEventRestrictions restrictions,
CMTime resumptionOffset,
CMTime playoutLimit,
NSDictionary<?,?> userDefinedAttributes) |
|
AVPlayerInterstitialEvent(AVPlayerItem primaryItem,
String identifier,
NSDate date,
NSArray<AVPlayerItem> templateItems,
AVPlayerInterstitialEventRestrictions restrictions,
CMTime resumptionOffset,
CMTime playoutLimit,
NSDictionary<?,?> userDefinedAttributes) |
|
-
-
Methods in org.robovm.apple.coremedia that return CMTime
| Modifier and Type | Method | Description |
| --- | --- | --- |
CMTime |
CMTime.absoluteValue() |
|
CMTime |
CMTime.add(CMTime rhs) |
|
CMTime |
CMTime.clampToRange(CMTimeRange range) |
|
CMTime |
CMTime.convertScale(int newTimescale,
CMTimeRoundingMethod method) |
|
CMTime |
CMClockOrTimebase.convertTime(CMTime time,
CMClockOrTimebase toClockOrTimebase) |
|
static CMTime |
CMClockOrTimebase.convertTime(CMTime time,
CMClockOrTimebase fromClockOrTimebase,
CMClockOrTimebase toClockOrTimebase) |
|
static CMTime |
CMTime.create(double seconds,
int preferredTimescale) |
|
static CMTime |
CMTime.create(long value,
int timescale) |
|
static CMTime |
CMTime.create(long value,
int timescale,
long epoch) |
|
static CMTime |
CMTime.create(NSDictionary dictionaryRepresentation) |
|
static CMTime |
CMClock.createHostTimeFromSystemUnits(long hostTime) |
|
CMTime |
CMTime.foldIntoRange(CMTimeRange foldRange) |
|
CMTime |
CMClock.getAnchorReferenceTime() |
|
CMTime |
CMClock.getAnchorTime() |
|
CMTime |
CMClockOrTimebase.getAnchorTime() |
|
CMTime |
CMBufferQueueCallbacks.getDecodeTimeStamp(CMBuffer buffer) |
|
CMTime |
CMBufferQueueCallbacksAdapter.getDecodeTimeStamp(CMBuffer buffer) |
|
CMTime |
CMSampleBuffer.getDecodeTimeStamp() |
|
CMTime |
CMSampleTimingInfo.getDecodeTimeStamp() |
|
CMTime |
CMBufferQueue.getDuration() |
|
CMTime |
CMBufferQueueCallbacks.getDuration(CMBuffer buffer) |
|
CMTime |
CMBufferQueueCallbacksAdapter.getDuration(CMBuffer buffer) |
|
CMTime |
CMSampleBuffer.getDuration() |
|
CMTime |
CMSampleTimingInfo.getDuration() |
|
CMTime |
CMTimeRange.getDuration() |
|
CMTime |
CMTimeRange.getEnd() |
|
CMTime |
CMBufferQueue.getEndPresentationTimeStamp() |
|
CMTime |
CMBufferQueue.getFirstDecodeTimeStamp() |
|
CMTime |
CMBufferQueue.getFirstPresentationTimeStamp() |
|
CMTime |
CMTimeCodeFormatDescription.getFrameDuration() |
|
CMTime |
CMBufferQueue.getMaxPresentationTimeStamp() |
|
CMTime |
CMBufferQueue.getMinDecodeTimeStamp() |
|
CMTime |
CMBufferQueue.getMinPresentationTimeStamp() |
|
CMTime |
CMSampleBuffer.getOutputDecodeTimeStamp() |
|
CMTime |
CMSampleBuffer.getOutputDuration() |
|
CMTime |
CMSampleBuffer.getOutputPresentationTimeStamp() |
|
CMTime |
CMBufferQueueCallbacks.getPresentationTimeStamp(CMBuffer buffer) |
|
CMTime |
CMBufferQueueCallbacksAdapter.getPresentationTimeStamp(CMBuffer buffer) |
|
CMTime |
CMSampleBuffer.getPresentationTimeStamp() |
|
CMTime |
CMSampleTimingInfo.getPresentationTimeStamp() |
|
CMTime |
CMClockOrTimebase.getRelativeAnchorTime() |
|
CMTime |
CMTimeRange.getStart() |
|
CMTime |
CMClock.getTime() |
|
CMTime |
CMClockOrTimebase.getTime() |
|
CMTime |
CMTimebase.getTime() |
|
CMTime |
CMTimebase.getTime(int timescale,
CMTimeRoundingMethod method) |
|
CMTime |
CMSampleBufferAttachment.getTrimDurationAtEnd() |
|
CMTime |
CMSampleBufferAttachment.getTrimDurationAtStart() |
|
static CMTime |
CMTime.Indefinite() |
|
static CMTime |
CMTime.Invalid() |
|
CMTime |
CMTime.mapDurationFromRangeToRange(CMTimeRange fromRange,
CMTimeRange toRange) |
|
CMTime |
CMTime.mapTimeFromRangeToRange(CMTimeRange fromRange,
CMTimeRange toRange) |
|
CMTime |
CMTime.maximum(CMTime time2) |
|
CMTime |
CMTime.minimum(CMTime time2) |
|
CMTime |
CMTime.multiply(int multiplier) |
|
CMTime |
CMTime.multiplyByFloat64(double multiplier) |
|
CMTime |
CMTime.multiplyByRatio(int multiplier,
int divisor) |
|
static CMTime |
CMTime.NegativeInfinity() |
|
static CMTime |
CMTime.PositiveInfinity() |
|
CMTime |
CMTime.setEpoch(long epoch) |
|
CMTime |
CMTime.setFlags(CMTimeFlags flags) |
|
CMTime |
CMTime.setTimescale(int timescale) |
|
CMTime |
CMTime.setValue(long value) |
|
CMTime |
CMTime.subtract(CMTime rhs) |
|
static CMTime |
CMTime.Zero() |
|
Methods in org.robovm.apple.coremedia with parameters of type CMTime
| Modifier and Type | Method | Description |
| --- | --- | --- |
CMTime |
CMTime.add(CMTime rhs) |
|
int |
CMTime.compare(CMTime time2) |
|
boolean |
CMTimeRange.containsTime(CMTime time) |
|
static long |
CMClock.convertHostTimeToSystemUnits(CMTime hostTime) |
|
CMTime |
CMClockOrTimebase.convertTime(CMTime time,
CMClockOrTimebase toClockOrTimebase) |
|
static CMTime |
CMClockOrTimebase.convertTime(CMTime time,
CMClockOrTimebase fromClockOrTimebase,
CMClockOrTimebase toClockOrTimebase) |
|
static CMTimeCodeFormatDescription |
CMTimeCodeFormatDescription.create(CMTimeCodeFormatType timeCodeFormatType,
CMTime frameDuration,
int frameQuanta,
CMTimeCodeFlags tcFlags,
CMTimeCodeFormatDescriptionExtension extensions) |
|
static CMTimeRange |
CMTimeRange.create(CMTime start,
CMTime duration) |
|
static CMSampleBuffer |
CMSampleBuffer.createAudioSampleBuffer(CMBlockBuffer dataBuffer,
boolean dataReady,
CMSampleBuffer.MakeDataReadyCallback callback,
CMFormatDescription formatDescription,
long numSamples,
CMTime sbufPTS,
AudioStreamPacketDescription[] packetDescriptions) |
|
static CMSampleBuffer |
CMSampleBuffer.createAudioSampleBuffer(CMBlockBuffer dataBuffer,
CMFormatDescription formatDescription,
long numSamples,
CMTime sbufPTS,
AudioStreamPacketDescription[] packetDescriptions) |
|
protected static OSStatus |
CMSampleBuffer.createAudioSampleBuffer0(CFAllocator allocator,
CMBlockBuffer dataBuffer,
boolean dataReady,
CMFormatDescription formatDescription,
long numSamples,
CMTime presentationTimeStamp,
AudioStreamPacketDescription packetDescriptions,
CMSampleBuffer.CMSampleBufferPtr sampleBufferOut,
Block1<CMSampleBuffer,OSStatus> makeDataReadyHandler) |
|
protected static OSStatus |
CMSampleBuffer.createAudioSampleBuffer0(CFAllocator allocator,
CMBlockBuffer dataBuffer,
boolean dataReady,
FunctionPtr makeDataReadyCallback,
long makeDataReadyRefcon,
CMFormatDescription formatDescription,
long numSamples,
CMTime presentationTimeStamp,
AudioStreamPacketDescription packetDescriptions,
CMSampleBuffer.CMSampleBufferPtr sampleBufferOut) |
|
protected static OSStatus |
CMSampleBuffer.createAudioSampleBuffer0(CFAllocator allocator,
CMBlockBuffer dataBuffer,
CMFormatDescription formatDescription,
long numSamples,
CMTime presentationTimeStamp,
AudioStreamPacketDescription packetDescriptions,
CMSampleBuffer.CMSampleBufferPtr sampleBufferOut) |
|
static CMTimeRange |
CMTimeRange.fromTimeToTime(CMTime start,
CMTime end) |
|
void |
CMBufferQueue.installTrigger(CMBufferQueue.TriggerCallback callback,
CMBufferQueueTriggerCondition triggerCondition,
CMTime triggerTime) |
|
protected OSStatus |
CMBufferQueue.installTrigger0(CMBufferQueueTriggerCondition condition,
CMTime time,
CMBufferQueueTriggerToken.CMBufferQueueTriggerTokenPtr triggerTokenOut,
VoidBlock1<CMBufferQueueTriggerToken> handler) |
|
protected OSStatus |
CMBufferQueue.installTrigger0(FunctionPtr callback,
long refcon,
CMBufferQueueTriggerCondition condition,
CMTime time,
CMBufferQueueTriggerToken.CMBufferQueueTriggerTokenPtr triggerTokenOut) |
|
CMTime |
CMTime.maximum(CMTime time2) |
|
CMTime |
CMTime.minimum(CMTime time2) |
|
void |
CMTimebase.setAnchorTime(CMTime timebaseTime,
CMTime immediateMasterTime) |
|
protected OSStatus |
CMTimebase.setAnchorTime0(CMTime timebaseTime,
CMTime immediateSourceTime) |
|
CMSampleTimingInfo |
CMSampleTimingInfo.setDecodeTimeStamp(CMTime decodeTimeStamp) |
|
CMSampleTimingInfo |
CMSampleTimingInfo.setDuration(CMTime duration) |
|
CMTimeRange |
CMTimeRange.setDuration(CMTime duration) |
|
void |
CMSampleBuffer.setOutputPresentationTimeStamp(CMTime outputPresentationTimeStamp) |
|
protected OSStatus |
CMSampleBuffer.setOutputPresentationTimeStamp0(CMTime outputPresentationTimeStamp) |
|
CMSampleTimingInfo |
CMSampleTimingInfo.setPresentationTimeStamp(CMTime presentationTimeStamp) |
|
void |
CMTimebase.setRateAndAnchorTime(double rate,
CMTime timebaseTime,
CMTime immediateMasterTime) |
|
protected OSStatus |
CMTimebase.setRateAndAnchorTime0(double rate,
CMTime timebaseTime,
CMTime immediateSourceTime) |
|
CMTimeRange |
CMTimeRange.setStart(CMTime start) |
|
void |
CMTimebase.setTime(CMTime time) |
|
protected OSStatus |
CMTimebase.setTime0(CMTime time) |
|
void |
CMTimebase.setTimerDispatchSourceNextFireTime(DispatchSource timerSource,
CMTime fireTime) |
|
protected OSStatus |
CMTimebase.setTimerDispatchSourceNextFireTime0(DispatchSource timerSource,
CMTime fireTime,
int flags) |
|
void |
CMTimebase.setTimerNextFireTime(NSTimer timer,
CMTime fireTime) |
|
protected OSStatus |
CMTimebase.setTimerNextFireTime0(NSTimer timer,
CMTime fireTime,
int flags) |
|
CMSampleBufferAttachment |
CMSampleBufferAttachment.setTrimDurationAtEnd(CMTime trimDurationAtEnd) |
|
CMSampleBufferAttachment |
CMSampleBufferAttachment.setTrimDurationAtStart(CMTime trimDurationAtStart) |
|
CMTime |
CMTime.subtract(CMTime rhs) |
|
Method parameters in org.robovm.apple.coremedia with type arguments of type CMTime
| Modifier and Type | Method | Description |
| --- | --- | --- |
static NSObjectProtocol |
CMTimebase.Notifications.observeEffectiveRateChanged(CMTimebase object,
VoidBlock2<CMTimebase,CMTime> block) |
|
static NSObjectProtocol |
CMTimebase.Notifications.observeTimeJumped(CMTimebase object,
VoidBlock2<CMTimebase,CMTime> block) |
|
static NSObjectProtocol |
CMSampleBuffer.Notifications.observeUpcomingOutputPTSRangeChanged(CMSampleBuffer object,
VoidBlock4<CMSampleBuffer,Boolean,CMTime,CMTime> block) |
|
static NSObjectProtocol |
CMSampleBuffer.Notifications.observeUpcomingOutputPTSRangeChanged(CMSampleBuffer object,
VoidBlock4<CMSampleBuffer,Boolean,CMTime,CMTime> block) |
|
CMBufferHandlers |
CMBufferHandlers.setGetDecodeTimeStamp(Block1<CFType,CMTime> getDecodeTimeStamp) |
|
CMBufferHandlers |
CMBufferHandlers.setGetDuration(Block1<CFType,CMTime> getDuration) |
|
CMBufferHandlers |
CMBufferHandlers.setGetPresentationTimeStamp(Block1<CFType,CMTime> getPresentationTimeStamp) |
|
static long |
CMTime.AsValuedListMarshaler.toNative(List<CMTime> l,
long flags) |
|
Constructor parameters in org.robovm.apple.coremedia with type arguments of type CMTime
| Constructor | Description |
| --- | --- |
CMBufferHandlers(long version,
Block1<CFType,CMTime> getDecodeTimeStamp,
Block1<CFType,CMTime> getPresentationTimeStamp,
Block1<CFType,CMTime> getDuration,
Block1<CFType,Boolean> isDataReady,
Block2<CFType,CFType,CFComparisonResult> compare,
String dataBecameReadyNotification,
Block1<CFType,Long> getSize) |
|
-
-
-
-