Add more things for audioworklet

Fedor 2023-10-30 22:47:44 +02:00
parent 310f3adab2
commit 1b44b5877f
305 changed files with 7471 additions and 4176 deletions
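
Despite the terse commit message, most of these hunks are one mechanical migration: Blob/File plumbing moves from opaque nsISupports* parents to nsIGlobalObject*, storage and encoder callbacks hand back thread-agnostic BlobImpl objects instead of DOM Blob wrappers, and infallible MOZ_ASSERTs at creation sites become null-checks. A minimal sketch of the resulting call shape, using only signatures that appear in the hunks below (the function name is hypothetical):

// Sketch only; assumes Mozilla's internal headers (mozilla/dom/Blob.h).
already_AddRefed<mozilla::dom::Blob> MakeBlob(nsIGlobalObject* aGlobal,
                                              mozilla::dom::BlobImpl* aImpl) {
  // Blob::Create now takes the global directly and may return nullptr,
  // so callers null-check instead of asserting.
  RefPtr<mozilla::dom::Blob> blob = mozilla::dom::Blob::Create(aGlobal, aImpl);
  if (NS_WARN_IF(!blob)) {
    return nullptr;
  }
  return blob.forget();
}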

View File

@ -239,8 +239,8 @@ class ConsumeBodyDoneObserver final : public nsIStreamLoaderObserver,
return NS_OK;
}
virtual void BlobStoreCompleted(MutableBlobStorage* aBlobStorage, Blob* aBlob,
nsresult aRv) override {
virtual void BlobStoreCompleted(MutableBlobStorage* aBlobStorage,
BlobImpl* aBlobImpl, nsresult aRv) override {
// On error.
if (NS_FAILED(aRv)) {
OnStreamComplete(nullptr, nullptr, aRv, 0, nullptr);
@ -251,7 +251,7 @@ class ConsumeBodyDoneObserver final : public nsIStreamLoaderObserver,
// consuming of the body.
mBodyConsumer->NullifyConsumeBodyPump();
mBodyConsumer->OnBlobResult(aBlob, mWorkerRef);
mBodyConsumer->OnBlobResult(aBlobImpl, mWorkerRef);
}
private:
@ -405,7 +405,7 @@ class FileCreationHandler final : public PromiseNativeHandler {
return;
}
mConsumer->OnBlobResult(blob, mWorkerRef);
mConsumer->OnBlobResult(blob->Impl(), mWorkerRef);
}
void RejectedCallback(JSContext* aCx, JS::Handle<JS::Value> aValue) override {
@ -531,8 +531,8 @@ void BodyConsumer::BeginConsumeBodyMainThread(ThreadSafeWorkerRef* aWorkerRef) {
nsCOMPtr<nsIStreamListener> listener;
if (mConsumeType == CONSUME_BLOB) {
listener = new MutableBlobStreamListener(
mBlobStorageType, nullptr, mBodyMimeType, p, mMainThreadEventTarget);
listener = new MutableBlobStreamListener(mBlobStorageType, mBodyMimeType, p,
mMainThreadEventTarget);
} else {
nsCOMPtr<nsIStreamLoader> loader;
rv = NS_NewStreamLoader(getter_AddRefs(loader), p);
@ -573,10 +573,11 @@ void BodyConsumer::BeginConsumeBodyMainThread(ThreadSafeWorkerRef* aWorkerRef) {
* been wrapped by FileCreationHandler). The blob is sent to the target thread
* and ContinueConsumeBody is called.
*/
void BodyConsumer::OnBlobResult(Blob* aBlob, ThreadSafeWorkerRef* aWorkerRef) {
void BodyConsumer::OnBlobResult(BlobImpl* aBlobImpl,
ThreadSafeWorkerRef* aWorkerRef) {
AssertIsOnMainThread();
DispatchContinueConsumeBlobBody(aBlob ? aBlob->Impl() : nullptr, aWorkerRef);
DispatchContinueConsumeBlobBody(aBlobImpl, aWorkerRef);
}
void BodyConsumer::DispatchContinueConsumeBlobBody(
@ -753,7 +754,10 @@ void BodyConsumer::ContinueConsumeBlobBody(BlobImpl* aBlobImpl,
if (!aShuttingDown) {
RefPtr<dom::Blob> blob = dom::Blob::Create(mGlobal, aBlobImpl);
MOZ_ASSERT(blob);
if (NS_WARN_IF(!blob)) {
localPromise->MaybeReject(NS_ERROR_FAILURE);
return;
}
localPromise->MaybeResolve(blob);
}
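
Per the doc comment above, OnBlobResult now receives the BlobImpl directly; the old overload took a DOM Blob and unwrapped it. A minimal sketch of the main-thread call, with a hypothetical caller name:

// Sketch only; OnBlobResult must run on the main thread and forwards the
// impl to DispatchContinueConsumeBlobBody, which hops to the target thread.
void NotifyBodyConsumer(mozilla::dom::BodyConsumer* aConsumer,
                        mozilla::dom::BlobImpl* aBlobImpl,
                        mozilla::dom::ThreadSafeWorkerRef* aWorkerRef) {
  aConsumer->OnBlobResult(aBlobImpl, aWorkerRef);
}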

View File

@ -68,7 +68,8 @@ class BodyConsumer final : public nsIObserver,
void BeginConsumeBodyMainThread(ThreadSafeWorkerRef* aWorkerRef);
void OnBlobResult(Blob* aBlob, ThreadSafeWorkerRef* aWorkerRef = nullptr);
void OnBlobResult(BlobImpl* aBlobImpl,
ThreadSafeWorkerRef* aWorkerRef = nullptr);
void ContinueConsumeBody(nsresult aStatus, uint32_t aLength, uint8_t* aResult,
bool aShuttingDown = false);

View File

@ -254,6 +254,10 @@ class MOZ_STACK_CLASS FormDataParser {
mParentObject, reinterpret_cast<void*>(copy), body.Length(),
NS_ConvertUTF8toUTF16(mFilename), NS_ConvertUTF8toUTF16(mContentType),
/* aLastModifiedDate */ 0);
if (NS_WARN_IF(!file)) {
return false;
}
Optional<nsAString> dummy;
ErrorResult rv;
mFormData->Append(name, *file, dummy, rv);
@ -396,7 +400,7 @@ void BodyUtil::ConsumeArrayBuffer(JSContext* aCx,
}
// static
already_AddRefed<Blob> BodyUtil::ConsumeBlob(nsISupports* aParent,
already_AddRefed<Blob> BodyUtil::ConsumeBlob(nsIGlobalObject* aParent,
const nsString& aMimeType,
uint32_t aInputLength,
uint8_t* aInput,

View File

@ -34,7 +34,7 @@ class BodyUtil final {
* Creates an in-memory blob from an array. The blob takes ownership of
* |aInput|, which must be allocated by |malloc|.
*/
static already_AddRefed<Blob> ConsumeBlob(nsISupports* aParent,
static already_AddRefed<Blob> ConsumeBlob(nsIGlobalObject* aParent,
const nsString& aMimeType,
uint32_t aInputLength,
uint8_t* aInput, ErrorResult& aRv);
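
Per the comment above ConsumeBlob, the blob takes ownership of aInput and releases it with free(), so the buffer must be malloc-compatible. A hypothetical caller, sketched against the new nsIGlobalObject* signature:

// Sketch only; assumes BodyUtil.h plus <stdlib.h>/<string.h>.
already_AddRefed<mozilla::dom::Blob> MakeTextBlob(nsIGlobalObject* aGlobal,
                                                  mozilla::ErrorResult& aRv) {
  static const char kBody[] = "hello";
  uint32_t length = sizeof(kBody) - 1;
  uint8_t* data = static_cast<uint8_t*>(malloc(length));
  if (!data) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
    return nullptr;
  }
  memcpy(data, kBody, length);
  // ConsumeBlob takes ownership of |data|.
  return mozilla::dom::BodyUtil::ConsumeBlob(
      aGlobal, NS_LITERAL_STRING("text/plain"), length, data, aRv);
}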

View File

@ -4,6 +4,7 @@
#include "ImageEncoder.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
#include "mozilla/dom/MemoryBlobImpl.h"
#include "mozilla/dom/WorkerPrivate.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/DataSurfaceHelpers.h"
@ -98,14 +99,10 @@ class EncodingCompleteEvent : public CancelableRunnable {
// We want to null out mEncodeCompleteCallback no matter what.
RefPtr<EncodeCompleteCallback> callback(mEncodeCompleteCallback.forget());
if (!mFailed) {
// The correct parentObject has to be set by the mEncodeCompleteCallback.
RefPtr<Blob> blob =
Blob::CreateMemoryBlob(nullptr, mImgData, mImgSize, mType);
MOZ_ASSERT(blob);
rv = callback->ReceiveBlob(blob.forget());
RefPtr<BlobImpl> blobImpl = new MemoryBlobImpl(mImgData, mImgSize, mType);
rv = callback->ReceiveBlobImpl(blobImpl.forget());
} else {
rv = callback->ReceiveBlob(nullptr);
rv = callback->ReceiveBlobImpl(nullptr);
}
return rv;
@ -247,7 +244,7 @@ nsresult ImageEncoder::ExtractDataFromLayersImageAsync(
new EncodingRunnable(aType, aOptions, nullptr, aImage, encoder,
completeEvent, imgIEncoder::INPUT_FORMAT_HOSTARGB,
size, aUsePlaceholder, aUsingCustomOptions);
return NS_DispatchToBackgroundThread(event.forget());
return NS_DispatchBackgroundTask(event.forget());
}
/* static */
@ -266,7 +263,7 @@ nsresult ImageEncoder::ExtractDataAsync(
nsCOMPtr<nsIRunnable> event = new EncodingRunnable(
aType, aOptions, std::move(aImageBuffer), nullptr, encoder, completeEvent,
aFormat, aSize, aUsePlaceholder, aUsingCustomOptions);
return NS_DispatchToBackgroundThread(event.forget());
return NS_DispatchBackgroundTask(event.forget());
}
/*static*/

View File

@ -100,15 +100,15 @@ class ImageEncoder {
/**
* The callback interface of ExtractDataAsync and
* ExtractDataFromLayersImageAsync. ReceiveBlob() is called on main thread when
* encoding is complete.
* ExtractDataFromLayersImageAsync. ReceiveBlobImpl() is called on main thread
* when encoding is complete.
*/
class EncodeCompleteCallback {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodeCompleteCallback)
MOZ_CAN_RUN_SCRIPT
virtual nsresult ReceiveBlob(already_AddRefed<Blob> aBlob) = 0;
virtual nsresult ReceiveBlobImpl(already_AddRefed<BlobImpl> aBlobImpl) = 0;
protected:
virtual ~EncodeCompleteCallback() {}
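
The two implementations updated in this commit (canvas ToBlob and OffscreenCanvas::ToBlob, further down) follow the same shape; a stripped-down sketch, with the class name and stored state hypothetical:

// Sketch only; wraps the received BlobImpl into a DOM Blob for its global.
class ExampleEncodeCallback final : public EncodeCompleteCallback {
 public:
  explicit ExampleEncodeCallback(nsIGlobalObject* aGlobal) : mGlobal(aGlobal) {}

  MOZ_CAN_RUN_SCRIPT
  nsresult ReceiveBlobImpl(already_AddRefed<BlobImpl> aBlobImpl) override {
    RefPtr<BlobImpl> blobImpl = aBlobImpl;  // null when encoding failed
    RefPtr<Blob> blob;
    if (blobImpl) {
      blob = Blob::Create(mGlobal, blobImpl);
    }
    // ... hand |blob| (possibly null) to script, resolve a promise, etc. ...
    mGlobal = nullptr;
    return NS_OK;
  }

 private:
  nsCOMPtr<nsIGlobalObject> mGlobal;
};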

View File

@ -163,7 +163,7 @@ PostMessageEvent::Run() {
StructuredCloneHolder* holder;
if (mHolder.constructed<StructuredCloneHolder>()) {
mHolder.ref<StructuredCloneHolder>().Read(ToSupports(targetWindow), cx,
mHolder.ref<StructuredCloneHolder>().Read(targetWindow->AsGlobal(), cx,
&messageData, rv);
holder = &mHolder.ref<StructuredCloneHolder>();
} else {

View File

@ -230,7 +230,7 @@ StructuredCloneHolder::StructuredCloneHolder(
: StructuredCloneHolderBase(aScope),
mSupportsCloning(aSupportsCloning == CloningSupported),
mSupportsTransferring(aSupportsTransferring == TransferringSupported),
mParent(nullptr)
mGlobal(nullptr)
#ifdef DEBUG
,
mCreationEventTarget(GetCurrentThreadEventTarget())
@ -260,13 +260,13 @@ void StructuredCloneHolder::Write(JSContext* aCx, JS::Handle<JS::Value> aValue,
}
}
void StructuredCloneHolder::Read(nsISupports* aParent, JSContext* aCx,
void StructuredCloneHolder::Read(nsIGlobalObject* aGlobal, JSContext* aCx,
JS::MutableHandle<JS::Value> aValue,
ErrorResult& aRv) {
MOZ_ASSERT(aParent);
MOZ_ASSERT(aGlobal);
mozilla::AutoRestore<nsISupports*> guard(mParent);
mParent = aParent;
mozilla::AutoRestore<nsIGlobalObject*> guard(mGlobal);
mGlobal = aGlobal;
if (!StructuredCloneHolderBase::Read(aCx, aValue)) {
JS_ClearPendingException(aCx);
@ -284,23 +284,25 @@ void StructuredCloneHolder::Read(nsISupports* aParent, JSContext* aCx,
}
}
void StructuredCloneHolder::ReadFromBuffer(nsISupports* aParent, JSContext* aCx,
void StructuredCloneHolder::ReadFromBuffer(nsIGlobalObject* aGlobal,
JSContext* aCx,
JSStructuredCloneData& aBuffer,
JS::MutableHandle<JS::Value> aValue,
ErrorResult& aRv) {
ReadFromBuffer(aParent, aCx, aBuffer, JS_STRUCTURED_CLONE_VERSION, aValue,
ReadFromBuffer(aGlobal, aCx, aBuffer, JS_STRUCTURED_CLONE_VERSION, aValue,
aRv);
}
void StructuredCloneHolder::ReadFromBuffer(nsISupports* aParent, JSContext* aCx,
void StructuredCloneHolder::ReadFromBuffer(nsIGlobalObject* aGlobal,
JSContext* aCx,
JSStructuredCloneData& aBuffer,
uint32_t aAlgorithmVersion,
JS::MutableHandle<JS::Value> aValue,
ErrorResult& aRv) {
MOZ_ASSERT(!mBuffer, "ReadFromBuffer() must be called without a Write().");
mozilla::AutoRestore<nsISupports*> guard(mParent);
mParent = aParent;
mozilla::AutoRestore<nsIGlobalObject*> guard(mGlobal);
mGlobal = aGlobal;
if (!JS_ReadStructuredClone(aCx, aBuffer, aAlgorithmVersion,
mStructuredCloneScope, aValue, &sCallbacks,
@ -340,10 +342,10 @@ JSObject* StructuredCloneHolder::ReadFullySerializableObjects(
// the casting between JSPrincipals* and nsIPrincipal* we can't use
// getter_AddRefs above and have to already_AddRefed here.
nsCOMPtr<nsIPrincipal> principal =
already_AddRefed<nsIPrincipal>(nsJSPrincipals::get(prin));
already_AddRefed<nsIPrincipal>(nsJSPrincipals::get(prin));
nsresult rv = nsContentUtils::WrapNative(
aCx, principal, &NS_GET_IID(nsIPrincipal), &result);
aCx, principal, &NS_GET_IID(nsIPrincipal), &result);
if (NS_FAILED(rv)) {
xpc::Throw(aCx, NS_ERROR_DOM_DATA_CLONE_ERR);
return nullptr;
@ -434,7 +436,11 @@ JSObject* ReadBlob(JSContext* aCx, uint32_t aIndex,
// pointer while destructors are running.
RefPtr<BlobImpl> blobImpl = aHolder->BlobImpls()[aIndex];
RefPtr<Blob> blob = Blob::Create(aHolder->ParentDuringRead(), blobImpl);
RefPtr<Blob> blob = Blob::Create(aHolder->GlobalDuringRead(), blobImpl);
if (NS_WARN_IF(!blob)) {
return nullptr;
}
if (!ToJSValue(aCx, blob, &val)) {
return nullptr;
}
@ -499,7 +505,7 @@ already_AddRefed<Directory> ReadDirectoryInternal(
}
RefPtr<Directory> directory =
Directory::Create(aHolder->ParentDuringRead(), file);
Directory::Create(aHolder->GlobalDuringRead(), file);
return directory.forget();
}
@ -538,7 +544,7 @@ JSObject* ReadFileList(JSContext* aCx, JSStructuredCloneReader* aReader,
JS::Rooted<JS::Value> val(aCx);
{
RefPtr<FileList> fileList = new FileList(aHolder->ParentDuringRead());
RefPtr<FileList> fileList = new FileList(aHolder->GlobalDuringRead());
uint32_t zero, index;
// |index| is the index of the first blobImpl.
@ -561,7 +567,11 @@ JSObject* ReadFileList(JSContext* aCx, JSStructuredCloneReader* aReader,
RefPtr<BlobImpl> blobImpl = aHolder->BlobImpls()[pos];
MOZ_ASSERT(blobImpl->IsFile());
RefPtr<File> file = File::Create(aHolder->ParentDuringRead(), blobImpl);
RefPtr<File> file = File::Create(aHolder->GlobalDuringRead(), blobImpl);
if (NS_WARN_IF(!file)) {
return nullptr;
}
if (!fileList->Append(file)) {
return nullptr;
}
@ -613,7 +623,7 @@ JSObject* ReadFormData(JSContext* aCx, JSStructuredCloneReader* aReader,
// See the serialization of the FormData for the format.
JS::Rooted<JS::Value> val(aCx);
{
RefPtr<FormData> formData = new FormData(aHolder->ParentDuringRead());
RefPtr<FormData> formData = new FormData(aHolder->GlobalDuringRead());
Optional<nsAString> thirdArg;
for (uint32_t i = 0; i < aCount; ++i) {
@ -637,8 +647,10 @@ JSObject* ReadFormData(JSContext* aCx, JSStructuredCloneReader* aReader,
RefPtr<BlobImpl> blobImpl = aHolder->BlobImpls()[indexOrLengthOfString];
RefPtr<Blob> blob = Blob::Create(aHolder->ParentDuringRead(), blobImpl);
MOZ_ASSERT(blob);
RefPtr<Blob> blob = Blob::Create(aHolder->GlobalDuringRead(), blobImpl);
if (NS_WARN_IF(!blob)) {
return nullptr;
}
ErrorResult rv;
formData->Append(name, *blob, thirdArg, rv);
@ -807,7 +819,7 @@ JSObject* ReadInputStream(JSContext* aCx, uint32_t aIndex,
nsCOMPtr<nsIInputStream> inputStream = aHolder->InputStreams()[aIndex];
nsresult rv = nsContentUtils::WrapNative(
aCx, inputStream, &NS_GET_IID(nsIInputStream), &result);
aCx, inputStream, &NS_GET_IID(nsIInputStream), &result);
if (NS_FAILED(rv)) {
return nullptr;
}
@ -863,9 +875,8 @@ JSObject* StructuredCloneHolder::CustomReadHandler(
// This can be null.
JS::RootedObject result(aCx);
{
nsCOMPtr<nsIGlobalObject> parent = do_QueryInterface(mParent);
// aIndex is the index of the cloned image.
result = ImageBitmap::ReadStructuredClone(aCx, aReader, parent,
result = ImageBitmap::ReadStructuredClone(aCx, aReader, mGlobal,
GetSurfaces(), aIndex);
}
return result;
@ -982,12 +993,10 @@ bool StructuredCloneHolder::CustomReadTransferHandler(
}
#endif
MOZ_ASSERT(aExtraData < mPortIdentifiers.Length());
const MessagePortIdentifier& portIdentifier = mPortIdentifiers[aExtraData];
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(mParent);
UniqueMessagePortId portIdentifier(mPortIdentifiers[aExtraData]);
ErrorResult rv;
RefPtr<MessagePort> port = MessagePort::Create(global, portIdentifier, rv);
RefPtr<MessagePort> port = MessagePort::Create(mGlobal, portIdentifier, rv);
if (NS_WARN_IF(rv.Failed())) {
rv.SuppressException();
return false;
@ -1011,9 +1020,8 @@ bool StructuredCloneHolder::CustomReadTransferHandler(
MOZ_ASSERT(aContent);
OffscreenCanvasCloneData* data =
static_cast<OffscreenCanvasCloneData*>(aContent);
nsCOMPtr<nsIGlobalObject> parent = do_QueryInterface(mParent);
RefPtr<OffscreenCanvas> canvas =
OffscreenCanvas::CreateFromCloneData(parent, data);
OffscreenCanvas::CreateFromCloneData(mGlobal, data);
delete data;
JS::Rooted<JS::Value> value(aCx);
@ -1031,8 +1039,8 @@ bool StructuredCloneHolder::CustomReadTransferHandler(
StructuredCloneScope::SameProcessDifferentThread) {
MOZ_ASSERT(aContent);
ImageBitmapCloneData* data = static_cast<ImageBitmapCloneData*>(aContent);
nsCOMPtr<nsIGlobalObject> parent = do_QueryInterface(mParent);
RefPtr<ImageBitmap> bitmap = ImageBitmap::CreateFromCloneData(parent, data);
RefPtr<ImageBitmap> bitmap =
ImageBitmap::CreateFromCloneData(mGlobal, data);
delete data;
JS::Rooted<JS::Value> value(aCx);
@ -1062,15 +1070,16 @@ bool StructuredCloneHolder::CustomWriteTransferHandler(
MessagePort* port = nullptr;
nsresult rv = UNWRAP_OBJECT(MessagePort, &obj, port);
if (NS_SUCCEEDED(rv)) {
// We use aExtraData to store the index of this new port identifier.
*aExtraData = mPortIdentifiers.Length();
MessagePortIdentifier* identifier = mPortIdentifiers.AppendElement();
if (!port->CanBeCloned()) {
return false;
}
port->CloneAndDisentangle(*identifier);
UniqueMessagePortId identifier;
port->CloneAndDisentangle(identifier);
// We use aExtraData to store the index of this new port identifier.
*aExtraData = mPortIdentifiers.Length();
mPortIdentifiers.AppendElement(identifier.release());
*aTag = SCTAG_DOM_MAP_MESSAGEPORT;
*aOwnership = JS::SCTAG_TMO_CUSTOM;

View File

@ -12,13 +12,13 @@
#include "mozilla/Move.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "nsISupports.h"
#include "nsTArray.h"
#ifdef DEBUG
# include "nsIThread.h"
#endif
class nsIGlobalObject;
class nsIInputStream;
namespace mozilla {
@ -162,7 +162,7 @@ class StructuredCloneHolder : public StructuredCloneHolderBase {
JS::Handle<JS::Value> aTransfer,
JS::CloneDataPolicy cloneDataPolicy, ErrorResult& aRv);
void Read(nsISupports* aParent, JSContext* aCx,
void Read(nsIGlobalObject* aGlobal, JSContext* aCx,
JS::MutableHandle<JS::Value> aValue, ErrorResult& aRv);
// Call this method to know if this object is keeping some DOM object alive.
@ -191,9 +191,9 @@ class StructuredCloneHolder : public StructuredCloneHolderBase {
StructuredCloneScope CloneScope() const { return mStructuredCloneScope; }
// The parent object is set internally just during the Read(). This method
// The global object is set internally just during the Read(). This method
// can be used by read functions to retrieve it.
nsISupports* ParentDuringRead() const { return mParent; }
nsIGlobalObject* GlobalDuringRead() const { return mGlobal; }
// This must be called if the transferring has ports generated by Read().
// MessagePorts are not thread-safe and they must be retrieved in the thread
@ -270,11 +270,11 @@ class StructuredCloneHolder : public StructuredCloneHolderBase {
// If you receive a buffer from IPC, you can use this method to retrieve a
// JS::Value. It can happen that you want to pre-populate the array of Blobs
// and/or the PortIdentifiers.
void ReadFromBuffer(nsISupports* aParent, JSContext* aCx,
void ReadFromBuffer(nsIGlobalObject* aGlobal, JSContext* aCx,
JSStructuredCloneData& aBuffer,
JS::MutableHandle<JS::Value> aValue, ErrorResult& aRv);
void ReadFromBuffer(nsISupports* aParent, JSContext* aCx,
void ReadFromBuffer(nsIGlobalObject* aGlobal, JSContext* aCx,
JSStructuredCloneData& aBuffer,
uint32_t aAlgorithmVersion,
JS::MutableHandle<JS::Value> aValue, ErrorResult& aRv);
@ -304,7 +304,7 @@ class StructuredCloneHolder : public StructuredCloneHolderBase {
nsTArray<RefPtr<gfx::DataSourceSurface>> mClonedSurfaces;
// This raw pointer is only set within ::Read() and is unset by the end.
nsISupports* MOZ_NON_OWNING_REF mParent;
nsIGlobalObject* MOZ_NON_OWNING_REF mGlobal;
// This array contains the ports once we've finished the reading. It's
// generated from the mPortIdentifiers array.
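
mGlobal is a non-owning raw pointer that is only set while Read() or ReadFromBuffer() is on the stack, so custom read functions fetch it via GlobalDuringRead(). A sketch modeled on the ReadBlob change earlier in this commit (the function name is hypothetical):

// Sketch only; GlobalDuringRead() is valid solely while a Read() is running.
static JSObject* ExampleReadBlob(JSContext* aCx, uint32_t aIndex,
                                 StructuredCloneHolder* aHolder) {
  RefPtr<BlobImpl> blobImpl = aHolder->BlobImpls()[aIndex];
  RefPtr<Blob> blob = Blob::Create(aHolder->GlobalDuringRead(), blobImpl);
  if (NS_WARN_IF(!blob)) {
    return nullptr;
  }
  JS::Rooted<JS::Value> val(aCx);
  if (!ToJSValue(aCx, blob, &val)) {
    return nullptr;
  }
  return &val.toObject();
}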

View File

@ -286,8 +286,10 @@ nsresult nsDOMDataChannel::DoOnMessageAvailable(const nsACString& aData,
if (aBinary) {
if (mBinaryType == DC_BINARY_TYPE_BLOB) {
RefPtr<Blob> blob =
Blob::CreateStringBlob(GetOwner(), aData, EmptyString());
MOZ_ASSERT(blob);
Blob::CreateStringBlob(GetOwnerGlobal(), aData, EmptyString());
if (NS_WARN_IF(!blob)) {
return NS_ERROR_FAILURE;
}
if (!ToJSValue(cx, blob, &jsData)) {
return NS_ERROR_FAILURE;

View File

@ -53,6 +53,8 @@ LOCAL_INCLUDES += [
'/js/xpconnect/wrappers',
]
include('/ipc/chromium/chromium-config.mozbuild')
if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
CXXFLAGS += ['-Wno-error=shadow']

View File

@ -31,19 +31,19 @@ void CanvasRenderingContextHelper::ToBlob(
// This is called on main thread.
MOZ_CAN_RUN_SCRIPT
nsresult ReceiveBlob(already_AddRefed<Blob> aBlob) override {
RefPtr<Blob> blob = aBlob;
nsresult ReceiveBlobImpl(already_AddRefed<BlobImpl> aBlobImpl) override {
RefPtr<BlobImpl> blobImpl = aBlobImpl;
RefPtr<Blob> newBlob;
RefPtr<Blob> blob;
if (blob) {
newBlob = Blob::Create(mGlobal, blob->Impl());
if (blobImpl) {
blob = Blob::Create(mGlobal, blobImpl);
}
RefPtr<BlobCallback> callback(mBlobCallback.forget());
ErrorResult rv;
callback->Call(newBlob, rv);
callback->Call(blob, rv);
mGlobal = nullptr;
MOZ_ASSERT(!mBlobCallback);

View File

@ -227,12 +227,16 @@ already_AddRefed<Promise> OffscreenCanvas::ToBlob(JSContext* aCx,
: mGlobal(aGlobal), mPromise(aPromise) {}
// This is called on main thread.
nsresult ReceiveBlob(already_AddRefed<Blob> aBlob) override {
RefPtr<Blob> blob = aBlob;
nsresult ReceiveBlobImpl(already_AddRefed<BlobImpl> aBlobImpl) override {
RefPtr<BlobImpl> blobImpl = aBlobImpl;
if (mPromise) {
RefPtr<Blob> newBlob = Blob::Create(mGlobal, blob->Impl());
mPromise->MaybeResolve(newBlob);
RefPtr<Blob> blob = Blob::Create(mGlobal, blobImpl);
if (NS_WARN_IF(!blob)) {
mPromise->MaybeReject(NS_ERROR_FAILURE);
} else {
mPromise->MaybeResolve(blob);
}
}
mGlobal = nullptr;

View File

@ -1927,59 +1927,62 @@ uint64_t IndexedBufferBinding::ByteCount() const {
////////////////////////////////////////
ScopedUnpackReset::ScopedUnpackReset(const WebGLContext* const webgl)
: ScopedGLWrapper<ScopedUnpackReset>(webgl->gl), mWebGL(webgl) {
: mWebGL(webgl) {
const auto& gl = mWebGL->gl;
// clang-format off
if (mWebGL->mPixelStore_UnpackAlignment != 4) mGL->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, 4);
if (mWebGL->mPixelStore_UnpackAlignment != 4) gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, 4);
if (mWebGL->IsWebGL2()) {
if (mWebGL->mPixelStore_UnpackRowLength != 0) mGL->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH , 0);
if (mWebGL->mPixelStore_UnpackImageHeight != 0) mGL->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, 0);
if (mWebGL->mPixelStore_UnpackSkipPixels != 0) mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_PIXELS , 0);
if (mWebGL->mPixelStore_UnpackSkipRows != 0) mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS , 0);
if (mWebGL->mPixelStore_UnpackSkipImages != 0) mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES , 0);
if (mWebGL->IsWebGL2()) {
if (mWebGL->mPixelStore_UnpackRowLength != 0) gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH , 0);
if (mWebGL->mPixelStore_UnpackImageHeight != 0) gl->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, 0);
if (mWebGL->mPixelStore_UnpackSkipPixels != 0) gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_PIXELS , 0);
if (mWebGL->mPixelStore_UnpackSkipRows != 0) gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS , 0);
if (mWebGL->mPixelStore_UnpackSkipImages != 0) gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES , 0);
if (mWebGL->mBoundPixelUnpackBuffer) mGL->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER, 0);
}
if (mWebGL->mBoundPixelUnpackBuffer) gl->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER, 0);
}
// clang-format on
}
void ScopedUnpackReset::UnwrapImpl() {
ScopedUnpackReset::~ScopedUnpackReset() {
const auto& gl = mWebGL->gl;
// clang-format off
mGL->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, mWebGL->mPixelStore_UnpackAlignment);
gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, mWebGL->mPixelStore_UnpackAlignment);
if (mWebGL->IsWebGL2()) {
mGL->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH , mWebGL->mPixelStore_UnpackRowLength );
mGL->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, mWebGL->mPixelStore_UnpackImageHeight);
mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_PIXELS , mWebGL->mPixelStore_UnpackSkipPixels );
mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS , mWebGL->mPixelStore_UnpackSkipRows );
mGL->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES , mWebGL->mPixelStore_UnpackSkipImages );
if (mWebGL->IsWebGL2()) {
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH , mWebGL->mPixelStore_UnpackRowLength );
gl->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, mWebGL->mPixelStore_UnpackImageHeight);
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_PIXELS , mWebGL->mPixelStore_UnpackSkipPixels );
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS , mWebGL->mPixelStore_UnpackSkipRows );
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES , mWebGL->mPixelStore_UnpackSkipImages );
GLuint pbo = 0;
if (mWebGL->mBoundPixelUnpackBuffer) {
pbo = mWebGL->mBoundPixelUnpackBuffer->mGLName;
}
mGL->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER, pbo);
GLuint pbo = 0;
if (mWebGL->mBoundPixelUnpackBuffer) {
pbo = mWebGL->mBoundPixelUnpackBuffer->mGLName;
}
gl->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER, pbo);
}
// clang-format on
}
////////////////////
void ScopedFBRebinder::UnwrapImpl() {
ScopedFBRebinder::~ScopedFBRebinder() {
const auto fnName = [&](WebGLFramebuffer* fb) {
return fb ? fb->mGLName : 0;
};
const auto& gl = mWebGL->gl;
if (mWebGL->IsWebGL2()) {
mGL->fBindFramebuffer(LOCAL_GL_DRAW_FRAMEBUFFER,
fnName(mWebGL->mBoundDrawFramebuffer));
mGL->fBindFramebuffer(LOCAL_GL_READ_FRAMEBUFFER,
fnName(mWebGL->mBoundReadFramebuffer));
gl->fBindFramebuffer(LOCAL_GL_DRAW_FRAMEBUFFER,
fnName(mWebGL->mBoundDrawFramebuffer));
gl->fBindFramebuffer(LOCAL_GL_READ_FRAMEBUFFER,
fnName(mWebGL->mBoundReadFramebuffer));
} else {
MOZ_ASSERT(mWebGL->mBoundDrawFramebuffer == mWebGL->mBoundReadFramebuffer);
mGL->fBindFramebuffer(LOCAL_GL_FRAMEBUFFER,
fnName(mWebGL->mBoundDrawFramebuffer));
gl->fBindFramebuffer(LOCAL_GL_FRAMEBUFFER,
fnName(mWebGL->mBoundDrawFramebuffer));
}
}
@ -1996,17 +1999,15 @@ static GLenum TargetIfLazy(GLenum target) {
}
}
ScopedLazyBind::ScopedLazyBind(gl::GLContext* gl, GLenum target,
ScopedLazyBind::ScopedLazyBind(gl::GLContext* const gl, const GLenum target,
const WebGLBuffer* buf)
: ScopedGLWrapper<ScopedLazyBind>(gl),
mTarget(buf ? TargetIfLazy(target) : 0),
mBuf(buf) {
: mGL(gl), mTarget(buf ? TargetIfLazy(target) : 0), mBuf(buf) {
if (mTarget) {
mGL->fBindBuffer(mTarget, mBuf->mGLName);
}
}
void ScopedLazyBind::UnwrapImpl() {
ScopedLazyBind::~ScopedLazyBind() {
if (mTarget) {
mGL->fBindBuffer(mTarget, 0);
}

View File

@ -2075,44 +2075,33 @@ bool ValidateTexImageTarget(WebGLContext* webgl, uint8_t funcDims,
TexImageTarget* const out_texImageTarget,
WebGLTexture** const out_tex);
class ScopedUnpackReset final : public gl::ScopedGLWrapper<ScopedUnpackReset> {
friend struct gl::ScopedGLWrapper<ScopedUnpackReset>;
class ScopedUnpackReset final {
private:
const WebGLContext* const mWebGL;
public:
explicit ScopedUnpackReset(const WebGLContext* webgl);
private:
void UnwrapImpl();
~ScopedUnpackReset();
};
class ScopedFBRebinder final : public gl::ScopedGLWrapper<ScopedFBRebinder> {
friend struct gl::ScopedGLWrapper<ScopedFBRebinder>;
class ScopedFBRebinder final {
private:
const WebGLContext* const mWebGL;
public:
explicit ScopedFBRebinder(const WebGLContext* const webgl)
: ScopedGLWrapper<ScopedFBRebinder>(webgl->gl), mWebGL(webgl) {}
private:
void UnwrapImpl();
explicit ScopedFBRebinder(const WebGLContext* const webgl) : mWebGL(webgl) {}
~ScopedFBRebinder();
};
class ScopedLazyBind final : public gl::ScopedGLWrapper<ScopedLazyBind> {
friend struct gl::ScopedGLWrapper<ScopedLazyBind>;
class ScopedLazyBind final {
private:
gl::GLContext* const mGL;
const GLenum mTarget;
const WebGLBuffer* const mBuf;
public:
ScopedLazyBind(gl::GLContext* gl, GLenum target, const WebGLBuffer* buf);
private:
void UnwrapImpl();
~ScopedLazyBind();
};
////

View File

@ -1735,25 +1735,25 @@ ScopedCopyTexImageSource::ScopedCopyTexImageSource(
// Now create the swizzled FB we'll be exposing.
GLuint rgbaRB = 0;
gl->fGenRenderbuffers(1, &rgbaRB);
gl::ScopedBindRenderbuffer scopedRB(gl, rgbaRB);
gl->fRenderbufferStorage(LOCAL_GL_RENDERBUFFER, sizedFormat, srcWidth,
srcHeight);
GLuint rgbaFB = 0;
gl->fGenFramebuffers(1, &rgbaFB);
gl->fBindFramebuffer(LOCAL_GL_FRAMEBUFFER, rgbaFB);
gl->fFramebufferRenderbuffer(LOCAL_GL_FRAMEBUFFER, LOCAL_GL_COLOR_ATTACHMENT0,
LOCAL_GL_RENDERBUFFER, rgbaRB);
{
gl->fGenRenderbuffers(1, &rgbaRB);
gl::ScopedBindRenderbuffer scopedRB(gl, rgbaRB);
gl->fRenderbufferStorage(LOCAL_GL_RENDERBUFFER, sizedFormat, srcWidth,
srcHeight);
const GLenum status = gl->fCheckFramebufferStatus(LOCAL_GL_FRAMEBUFFER);
if (status != LOCAL_GL_FRAMEBUFFER_COMPLETE) {
MOZ_CRASH("GFX: Temp framebuffer is not complete.");
gl->fGenFramebuffers(1, &rgbaFB);
gl->fBindFramebuffer(LOCAL_GL_FRAMEBUFFER, rgbaFB);
gl->fFramebufferRenderbuffer(LOCAL_GL_FRAMEBUFFER,
LOCAL_GL_COLOR_ATTACHMENT0,
LOCAL_GL_RENDERBUFFER, rgbaRB);
const GLenum status = gl->fCheckFramebufferStatus(LOCAL_GL_FRAMEBUFFER);
if (status != LOCAL_GL_FRAMEBUFFER_COMPLETE) {
MOZ_CRASH("GFX: Temp framebuffer is not complete.");
}
}
// Restore RB binding.
scopedRB.Unwrap(); // This function should really have a better name.
// Draw-blit rgbaTex into rgbaFB.
const gfx::IntSize srcSize(srcWidth, srcHeight);
{
@ -1762,10 +1762,6 @@ ScopedCopyTexImageSource::ScopedCopyTexImageSource(
srcSize);
}
// Restore Tex2D binding and destroy the temp tex.
scopedBindTex.Unwrap();
scopedTex.Unwrap();
// Leave RB and FB alive, and FB bound.
mRB = rgbaRB;
mFB = rgbaFB;

View File

@ -71,7 +71,7 @@ namespace mozilla {
namespace dom {
struct ConsoleStructuredCloneData {
nsCOMPtr<nsISupports> mParent;
nsCOMPtr<nsIGlobalObject> mGlobal;
nsTArray<RefPtr<BlobImpl>> mBlobs;
};
@ -263,8 +263,9 @@ class ConsoleRunnable : public StructuredCloneHolderBase {
JS::Rooted<JS::Value> val(aCx);
{
RefPtr<Blob> blob = Blob::Create(mClonedData.mParent,
mClonedData.mBlobs.ElementAt(aIndex));
nsCOMPtr<nsIGlobalObject> global = mClonedData.mGlobal;
RefPtr<Blob> blob =
Blob::Create(global, mClonedData.mBlobs.ElementAt(aIndex));
if (!ToJSValue(aCx, blob, &val)) {
return nullptr;
}
@ -385,7 +386,7 @@ class ConsoleRunnable : public StructuredCloneHolderBase {
JS::Rooted<JS::Value> argumentsValue(aCx);
bool ok = Read(aCx, &argumentsValue);
mClonedData.mParent = nullptr;
mClonedData.mGlobal = nullptr;
if (!ok) {
return;
@ -586,7 +587,8 @@ class ConsoleWorkerRunnable : public WorkerProxyToMainThreadRunnable,
return;
}
RunConsole(jsapi.cx(), aWorkerPrivate, outerWindow, aWindow);
RunConsole(jsapi.cx(), aWindow->AsGlobal(), aWorkerPrivate, outerWindow,
aWindow);
}
void RunWindowless(WorkerPrivate* aWorkerPrivate) {
@ -617,7 +619,12 @@ class ConsoleWorkerRunnable : public WorkerProxyToMainThreadRunnable,
JSAutoRealm ar(cx, global);
RunConsole(cx, aWorkerPrivate, nullptr, nullptr);
nsCOMPtr<nsIGlobalObject> globalObject = xpc::NativeGlobal(global);
if (NS_WARN_IF(!globalObject)) {
return;
}
RunConsole(cx, globalObject, aWorkerPrivate, nullptr, nullptr);
}
void RunBackOnWorkerThreadForCleanup(WorkerPrivate* aWorkerPrivate) override {
@ -627,7 +634,8 @@ class ConsoleWorkerRunnable : public WorkerProxyToMainThreadRunnable,
}
// This method is called in the main-thread.
virtual void RunConsole(JSContext* aCx, WorkerPrivate* aWorkerPrivate,
virtual void RunConsole(JSContext* aCx, nsIGlobalObject* aGlobal,
WorkerPrivate* aWorkerPrivate,
nsPIDOMWindowOuter* aOuterWindow,
nsPIDOMWindowInner* aInnerWindow) = 0;
@ -652,9 +660,11 @@ class ConsoleCallDataWorkerRunnable final : public ConsoleWorkerRunnable {
private:
~ConsoleCallDataWorkerRunnable() override { MOZ_ASSERT(!mCallData); }
void RunConsole(JSContext* aCx, WorkerPrivate* aWorkerPrivate,
void RunConsole(JSContext* aCx, nsIGlobalObject* aGlobal,
WorkerPrivate* aWorkerPrivate,
nsPIDOMWindowOuter* aOuterWindow,
nsPIDOMWindowInner* aInnerWindow) override {
MOZ_ASSERT(aGlobal);
MOZ_ASSERT(aWorkerPrivate);
AssertIsOnMainThread();
@ -685,12 +695,11 @@ class ConsoleCallDataWorkerRunnable final : public ConsoleWorkerRunnable {
mCallData->SetIDs(id, innerID);
}
// Now we could have the correct window (if we are not window-less).
mClonedData.mParent = aInnerWindow;
mClonedData.mGlobal = aGlobal;
ProcessCallData(aCx, mConsoleData, mCallData);
mClonedData.mParent = nullptr;
mClonedData.mGlobal = nullptr;
}
virtual void ReleaseData() override { mCallData = nullptr; }
@ -762,17 +771,18 @@ class ConsoleProfileWorkerRunnable final : public ConsoleWorkerRunnable {
}
private:
void RunConsole(JSContext* aCx, WorkerPrivate* aWorkerPrivate,
void RunConsole(JSContext* aCx, nsIGlobalObject* aGlobal,
WorkerPrivate* aWorkerPrivate,
nsPIDOMWindowOuter* aOuterWindow,
nsPIDOMWindowInner* aInnerWindow) override {
AssertIsOnMainThread();
MOZ_ASSERT(aGlobal);
// Now we could have the correct window (if we are not window-less).
mClonedData.mParent = aInnerWindow;
mClonedData.mGlobal = aGlobal;
ProcessProfileData(aCx, mName, mAction);
mClonedData.mParent = nullptr;
mClonedData.mGlobal = nullptr;
}
virtual void ReleaseData() override {}

View File

@ -17,7 +17,6 @@
#include "mozilla/dom/TypedArray.h"
#include "mozilla/dom/WebCryptoCommon.h"
#include "mozilla/dom/WebCryptoTask.h"
#include "mozilla/dom/WebCryptoThreadPool.h"
#include "mozilla/dom/WorkerRef.h"
#include "mozilla/dom/WorkerPrivate.h"
@ -337,7 +336,13 @@ void WebCryptoTask::DispatchWithPromise(Promise* aResultPromise) {
MAYBE_EARLY_FAIL(mEarlyRv);
// dispatch to thread pool
mEarlyRv = WebCryptoThreadPool::Dispatch(this);
if (!EnsureNSSInitializedChromeOrContent()) {
mEarlyRv = NS_ERROR_FAILURE;
}
MAYBE_EARLY_FAIL(mEarlyRv);
mEarlyRv = NS_DispatchBackgroundTask(this);
MAYBE_EARLY_FAIL(mEarlyRv)
}

View File

@ -1,123 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/dom/WebCryptoThreadPool.h"
#include "MainThreadUtils.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsComponentManagerUtils.h"
#include "nsNSSComponent.h"
#include "nsXPCOMCIDInternal.h"
#include "nsXPCOMPrivate.h"
#include "nsIObserverService.h"
#include "nsThreadPool.h"
namespace mozilla {
namespace dom {
StaticRefPtr<WebCryptoThreadPool> gInstance;
NS_IMPL_ISUPPORTS(WebCryptoThreadPool, nsIObserver)
/* static */
void WebCryptoThreadPool::Initialize() {
MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
MOZ_ASSERT(!gInstance, "More than one instance!");
gInstance = new WebCryptoThreadPool();
NS_WARNING_ASSERTION(gInstance, "Failed create thread pool!");
if (gInstance && NS_FAILED(gInstance->Init())) {
NS_WARNING("Failed to initialize thread pool!");
gInstance = nullptr;
}
}
/* static */
nsresult WebCryptoThreadPool::Dispatch(nsIRunnable* aRunnable) {
if (gInstance) {
return gInstance->DispatchInternal(aRunnable);
}
// Fail if called on shutdown.
return NS_ERROR_FAILURE;
}
nsresult WebCryptoThreadPool::Init() {
MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
NS_ENSURE_TRUE(obs, NS_ERROR_FAILURE);
// Need this observer to know when to shut down the thread pool.
return obs->AddObserver(this, NS_XPCOM_SHUTDOWN_THREADS_OBSERVER_ID, false);
}
nsresult WebCryptoThreadPool::DispatchInternal(nsIRunnable* aRunnable) {
MutexAutoLock lock(mMutex);
if (mShutdown) {
return NS_ERROR_FAILURE;
}
if (!mPool) {
NS_ENSURE_TRUE(EnsureNSSInitializedChromeOrContent(), NS_ERROR_FAILURE);
nsCOMPtr<nsIThreadPool> pool(new nsThreadPool());
nsresult rv = pool->SetName(NS_LITERAL_CSTRING("SubtleCrypto"));
NS_ENSURE_SUCCESS(rv, rv);
pool.swap(mPool);
}
return mPool->Dispatch(aRunnable, NS_DISPATCH_NORMAL);
}
void WebCryptoThreadPool::Shutdown() {
MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
// Limit the scope of locking to avoid deadlocking if DispatchInternal ends
// up getting called during shutdown event processing.
nsCOMPtr<nsIThreadPool> pool;
{
MutexAutoLock lock(mMutex);
if (mShutdown) {
return;
}
pool = mPool;
mShutdown = true;
}
if (pool) {
pool->Shutdown();
}
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
NS_WARNING_ASSERTION(obs, "Failed to retrieve observer service!");
if (obs) {
if (NS_FAILED(
obs->RemoveObserver(this, NS_XPCOM_SHUTDOWN_THREADS_OBSERVER_ID))) {
NS_WARNING("Failed to remove shutdown observer!");
}
}
}
NS_IMETHODIMP
WebCryptoThreadPool::Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData) {
MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
if (gInstance) {
gInstance->Shutdown();
gInstance = nullptr;
}
return NS_OK;
}
} // namespace dom
} // namespace mozilla

View File

@ -1,47 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_WebCryptoThreadPool_h
#define mozilla_dom_WebCryptoThreadPool_h
#include "mozilla/Mutex.h"
#include "nsIObserver.h"
#include "nsIThreadPool.h"
namespace mozilla {
namespace dom {
class WebCryptoThreadPool final : nsIObserver {
public:
NS_DECL_THREADSAFE_ISUPPORTS
static void Initialize();
static nsresult Dispatch(nsIRunnable* aRunnable);
private:
WebCryptoThreadPool()
: mMutex("WebCryptoThreadPool::mMutex"),
mPool(nullptr),
mShutdown(false) {}
virtual ~WebCryptoThreadPool() {}
nsresult Init();
nsresult DispatchInternal(nsIRunnable* aRunnable);
void Shutdown();
NS_IMETHOD Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData) override;
mozilla::Mutex mMutex;
nsCOMPtr<nsIThreadPool> mPool;
bool mShutdown;
};
} // namespace dom
} // namespace mozilla
#endif // mozilla_dom_WebCryptoThreadPool_h

View File

@ -11,7 +11,6 @@ EXPORTS.mozilla.dom += [
'KeyAlgorithmProxy.h',
'WebCryptoCommon.h',
'WebCryptoTask.h',
'WebCryptoThreadPool.h'
]
UNIFIED_SOURCES += [
@ -19,7 +18,6 @@ UNIFIED_SOURCES += [
'CryptoKey.cpp',
'KeyAlgorithmProxy.cpp',
'WebCryptoTask.cpp',
'WebCryptoThreadPool.cpp',
]
include('/ipc/chromium/chromium-config.mozbuild')

View File

@ -285,14 +285,27 @@ already_AddRefed<File> DataTransferItem::GetAsFile(
if (RefPtr<Blob> blob = do_QueryObject(supports)) {
mCachedFile = blob->ToFile();
} else if (nsCOMPtr<BlobImpl> blobImpl = do_QueryInterface(supports)) {
MOZ_ASSERT(blobImpl->IsFile());
mCachedFile = File::Create(mDataTransfer, blobImpl);
} else if (nsCOMPtr<nsIFile> ifile = do_QueryInterface(supports)) {
mCachedFile = File::CreateFromFile(mDataTransfer, ifile);
} else {
MOZ_ASSERT(false, "One of the above code paths should be taken");
return nullptr;
nsCOMPtr<nsIGlobalObject> global = GetGlobalFromDataTransfer();
if (NS_WARN_IF(!global)) {
return nullptr;
}
if (nsCOMPtr<BlobImpl> blobImpl = do_QueryInterface(supports)) {
MOZ_ASSERT(blobImpl->IsFile());
mCachedFile = File::Create(global, blobImpl);
if (NS_WARN_IF(!mCachedFile)) {
return nullptr;
}
} else if (nsCOMPtr<nsIFile> ifile = do_QueryInterface(supports)) {
mCachedFile = File::CreateFromFile(global, ifile);
if (NS_WARN_IF(!mCachedFile)) {
return nullptr;
}
} else {
MOZ_ASSERT(false, "One of the above code paths should be taken");
return nullptr;
}
}
}
@ -307,20 +320,8 @@ already_AddRefed<FileSystemEntry> DataTransferItem::GetAsEntry(
return nullptr;
}
nsCOMPtr<nsIGlobalObject> global;
// This is annoying, but DataTransfer may have various things as parent.
nsCOMPtr<EventTarget> target =
do_QueryInterface(mDataTransfer->GetParentObject());
if (target) {
global = target->GetOwnerGlobal();
} else {
RefPtr<Event> event = do_QueryObject(mDataTransfer->GetParentObject());
if (event) {
global = event->GetParentObject();
}
}
if (!global) {
nsCOMPtr<nsIGlobalObject> global = GetGlobalFromDataTransfer();
if (NS_WARN_IF(!global)) {
return nullptr;
}
@ -389,7 +390,12 @@ already_AddRefed<File> DataTransferItem::CreateFileFromInputStream(
return nullptr;
}
return File::CreateMemoryFile(mDataTransfer, data, available, fileName, mType,
nsCOMPtr<nsIGlobalObject> global = GetGlobalFromDataTransfer();
if (NS_WARN_IF(!global)) {
return nullptr;
}
return File::CreateMemoryFile(global, data, available, fileName, mType,
PR_Now());
}
@ -550,5 +556,23 @@ already_AddRefed<nsIVariant> DataTransferItem::Data(nsIPrincipal* aPrincipal,
return variant.forget();
}
already_AddRefed<nsIGlobalObject>
DataTransferItem::GetGlobalFromDataTransfer() {
nsCOMPtr<nsIGlobalObject> global;
// This is annoying, but DataTransfer may have various things as parent.
nsCOMPtr<EventTarget> target =
do_QueryInterface(mDataTransfer->GetParentObject());
if (target) {
global = target->GetOwnerGlobal();
} else {
RefPtr<Event> event = do_QueryObject(mDataTransfer->GetParentObject());
if (event) {
global = event->GetParentObject();
}
}
return global.forget();
}
} // namespace dom
} // namespace mozilla

View File

@ -103,6 +103,8 @@ class DataTransferItem final : public nsISupports, public nsWrapperCache {
~DataTransferItem() {}
already_AddRefed<File> CreateFileFromInputStream(nsIInputStream* aStream);
already_AddRefed<nsIGlobalObject> GetGlobalFromDataTransfer();
// The index in the 2d mIndexedItems array
uint32_t mIndex;

View File

@ -3,6 +3,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Blob.h"
#include "EmptyBlobImpl.h"
#include "File.h"
#include "MemoryBlobImpl.h"
#include "mozilla/dom/BlobBinding.h"
@ -10,6 +11,7 @@
#include "mozilla/dom/WorkerCommon.h"
#include "mozilla/dom/WorkerPrivate.h"
#include "MultipartBlobImpl.h"
#include "nsIGlobalObject.h"
#include "nsIInputStream.h"
#include "nsPIDOMWindow.h"
#include "StreamBlobImpl.h"
@ -22,12 +24,12 @@ namespace dom {
NS_IMPL_CYCLE_COLLECTION_CLASS(Blob)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(Blob)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mGlobal)
NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(Blob)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mGlobal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(Blob)
@ -63,36 +65,52 @@ void Blob::MakeValidBlobType(nsAString& aType) {
}
/* static */
Blob* Blob::Create(nsISupports* aParent, BlobImpl* aImpl) {
Blob* Blob::Create(nsIGlobalObject* aGlobal, BlobImpl* aImpl) {
MOZ_ASSERT(aImpl);
return aImpl->IsFile() ? new File(aParent, aImpl) : new Blob(aParent, aImpl);
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
return aImpl->IsFile() ? new File(aGlobal, aImpl) : new Blob(aGlobal, aImpl);
}
/* static */
already_AddRefed<Blob> Blob::CreateStringBlob(nsISupports* aParent,
already_AddRefed<Blob> Blob::CreateStringBlob(nsIGlobalObject* aGlobal,
const nsACString& aData,
const nsAString& aContentType) {
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<BlobImpl> blobImpl = StringBlobImpl::Create(aData, aContentType);
RefPtr<Blob> blob = Blob::Create(aParent, blobImpl);
RefPtr<Blob> blob = Blob::Create(aGlobal, blobImpl);
MOZ_ASSERT(!blob->mImpl->IsFile());
return blob.forget();
}
/* static */
already_AddRefed<Blob> Blob::CreateMemoryBlob(nsISupports* aParent,
already_AddRefed<Blob> Blob::CreateMemoryBlob(nsIGlobalObject* aGlobal,
void* aMemoryBuffer,
uint64_t aLength,
const nsAString& aContentType) {
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<Blob> blob = Blob::Create(
aParent, new MemoryBlobImpl(aMemoryBuffer, aLength, aContentType));
aGlobal, new MemoryBlobImpl(aMemoryBuffer, aLength, aContentType));
MOZ_ASSERT(!blob->mImpl->IsFile());
return blob.forget();
}
Blob::Blob(nsISupports* aParent, BlobImpl* aImpl)
: mImpl(aImpl), mParent(aParent) {
Blob::Blob(nsIGlobalObject* aGlobal, BlobImpl* aImpl)
: mImpl(aImpl), mGlobal(aGlobal) {
MOZ_ASSERT(mImpl);
MOZ_ASSERT(mGlobal);
}
Blob::~Blob() = default;
@ -112,7 +130,7 @@ already_AddRefed<File> Blob::ToFile() {
if (HasFileInterface()) {
file = static_cast<File*>(this);
} else {
file = new File(mParent, mImpl);
file = new File(mGlobal, mImpl);
}
return file.forget();
@ -131,7 +149,7 @@ already_AddRefed<File> Blob::ToFile(const nsAString& aName,
return nullptr;
}
RefPtr<File> file = new File(mParent, impl);
RefPtr<File> file = new File(mGlobal, impl);
return file.forget();
}
@ -144,7 +162,7 @@ already_AddRefed<Blob> Blob::CreateSlice(uint64_t aStart, uint64_t aLength,
return nullptr;
}
RefPtr<Blob> blob = Blob::Create(mParent, impl);
RefPtr<Blob> blob = Blob::Create(mGlobal, impl);
return blob.forget();
}
@ -170,7 +188,7 @@ already_AddRefed<Blob> Blob::Slice(const Optional<int64_t>& aStart,
return nullptr;
}
RefPtr<Blob> blob = Blob::Create(mParent, impl);
RefPtr<Blob> blob = Blob::Create(mGlobal, impl);
return blob.forget();
}
@ -210,7 +228,10 @@ already_AddRefed<Blob> Blob::Constructor(
MOZ_ASSERT(!impl->IsFile());
RefPtr<Blob> blob = Blob::Create(aGlobal.GetAsSupports(), impl);
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
MOZ_ASSERT(global);
RefPtr<Blob> blob = Blob::Create(global, impl);
return blob.forget();
}
@ -243,8 +264,7 @@ already_AddRefed<Promise> Blob::ArrayBuffer(ErrorResult& aRv) {
already_AddRefed<Promise> Blob::ConsumeBody(
BodyConsumer::ConsumeType aConsumeType, ErrorResult& aRv) {
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(mParent);
if (NS_WARN_IF(!global)) {
if (NS_WARN_IF(!mGlobal)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
@ -255,7 +275,7 @@ already_AddRefed<Promise> Blob::ConsumeBody(
MOZ_ASSERT(workerPrivate);
mainThreadEventTarget = workerPrivate->MainThreadEventTarget();
} else {
mainThreadEventTarget = global->EventTargetFor(TaskCategory::Other);
mainThreadEventTarget = mGlobal->EventTargetFor(TaskCategory::Other);
}
MOZ_ASSERT(mainThreadEventTarget);
@ -266,7 +286,7 @@ already_AddRefed<Promise> Blob::ConsumeBody(
return nullptr;
}
return BodyConsumer::Create(global, mainThreadEventTarget, inputStream,
return BodyConsumer::Create(mGlobal, mainThreadEventTarget, inputStream,
nullptr, aConsumeType, VoidCString(),
VoidString(), VoidCString(),
MutableBlobStorage::eOnlyInMemory, aRv);
@ -335,15 +355,14 @@ void Blob::Stream(JSContext* aCx, JS::MutableHandle<JSObject*> aStream,
return;
}
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(GetParentObject());
if (NS_WARN_IF(!global)) {
if (NS_WARN_IF(!mGlobal)) {
aRv.Throw(NS_ERROR_FAILURE);
return;
}
RefPtr<BlobBodyStreamHolder> holder = new BlobBodyStreamHolder();
BodyStream::Create(aCx, holder, global, stream, aRv);
BodyStream::Create(aCx, holder, mGlobal, stream, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return;
}

View File

@ -15,6 +15,7 @@
#include "nsWrapperCache.h"
#include "nsWeakReference.h"
class nsIGlobalObject;
class nsIInputStream;
namespace mozilla {
@ -41,16 +42,16 @@ class Blob : public nsSupportsWeakReference, public nsWrapperCache {
typedef OwningArrayBufferViewOrArrayBufferOrBlobOrUSVString BlobPart;
// This creates a Blob or a File based on the type of BlobImpl.
static Blob* Create(nsISupports* aParent, BlobImpl* aImpl);
static Blob* Create(nsIGlobalObject* aGlobal, BlobImpl* aImpl);
static already_AddRefed<Blob> CreateStringBlob(nsISupports* aParent,
static already_AddRefed<Blob> CreateStringBlob(nsIGlobalObject* aGlobal,
const nsACString& aData,
const nsAString& aContentType);
// The returned Blob takes ownership of aMemoryBuffer. aMemoryBuffer will be
// freed by free so it must be allocated by malloc or something
// compatible with it.
static already_AddRefed<Blob> CreateMemoryBlob(nsISupports* aParent,
static already_AddRefed<Blob> CreateMemoryBlob(nsIGlobalObject* aGlobal,
void* aMemoryBuffer,
uint64_t aLength,
const nsAString& aContentType);
@ -87,7 +88,7 @@ class Blob : public nsSupportsWeakReference, public nsWrapperCache {
static void MakeValidBlobType(nsAString& aType);
// WebIDL methods
nsISupports* GetParentObject() const { return mParent; }
nsIGlobalObject* GetParentObject() const { return mGlobal; }
bool IsMemoryFile() const;
@ -122,7 +123,7 @@ class Blob : public nsSupportsWeakReference, public nsWrapperCache {
protected:
// File constructor should never be used directly. Use Blob::Create instead.
Blob(nsISupports* aParent, BlobImpl* aImpl);
Blob(nsIGlobalObject* aGlobal, BlobImpl* aImpl);
virtual ~Blob();
virtual bool HasFileInterface() const { return false; }
@ -137,7 +138,7 @@ class Blob : public nsSupportsWeakReference, public nsWrapperCache {
RefPtr<BlobImpl> mImpl;
private:
nsCOMPtr<nsISupports> mParent;
nsCOMPtr<nsIGlobalObject> mGlobal;
};
NS_DEFINE_STATIC_IID_ACCESSOR(Blob, NS_DOM_BLOB_IID)
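
Per the ownership comment above, CreateMemoryBlob's buffer is released with free(), so it must be malloc-compatible. A hypothetical caller against the new signature:

// Sketch only; the returned Blob owns |buffer| and will free() it.
already_AddRefed<mozilla::dom::Blob> MakeMemoryBlob(nsIGlobalObject* aGlobal) {
  const uint64_t length = 4;
  void* buffer = malloc(length);
  if (!buffer) {
    return nullptr;
  }
  memcpy(buffer, "data", length);
  return mozilla::dom::Blob::CreateMemoryBlob(
      aGlobal, buffer, length, NS_LITERAL_STRING("application/octet-stream"));
}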

View File

@ -16,61 +16,88 @@
namespace mozilla {
namespace dom {
File::File(nsISupports* aParent, BlobImpl* aImpl) : Blob(aParent, aImpl) {
File::File(nsIGlobalObject* aGlobal, BlobImpl* aImpl) : Blob(aGlobal, aImpl) {
MOZ_ASSERT(aImpl->IsFile());
}
File::~File() {}
/* static */
File* File::Create(nsISupports* aParent, BlobImpl* aImpl) {
File* File::Create(nsIGlobalObject* aGlobal, BlobImpl* aImpl) {
MOZ_ASSERT(aImpl);
MOZ_ASSERT(aImpl->IsFile());
return new File(aParent, aImpl);
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
return new File(aGlobal, aImpl);
}
/* static */
already_AddRefed<File> File::Create(nsISupports* aParent,
already_AddRefed<File> File::Create(nsIGlobalObject* aGlobal,
const nsAString& aName,
const nsAString& aContentType,
uint64_t aLength,
int64_t aLastModifiedDate) {
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<File> file = new File(
aParent, new BaseBlobImpl(NS_LITERAL_STRING("BaseBlobImpl"), aName,
aGlobal, new BaseBlobImpl(NS_LITERAL_STRING("BaseBlobImpl"), aName,
aContentType, aLength, aLastModifiedDate));
return file.forget();
}
/* static */
already_AddRefed<File> File::CreateMemoryFile(nsISupports* aParent,
already_AddRefed<File> File::CreateMemoryFile(nsIGlobalObject* aGlobal,
void* aMemoryBuffer,
uint64_t aLength,
const nsAString& aName,
const nsAString& aContentType,
int64_t aLastModifiedDate) {
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<File> file =
new File(aParent, new MemoryBlobImpl(aMemoryBuffer, aLength, aName,
new File(aGlobal, new MemoryBlobImpl(aMemoryBuffer, aLength, aName,
aContentType, aLastModifiedDate));
return file.forget();
}
/* static */
already_AddRefed<File> File::CreateFromFile(nsISupports* aParent,
already_AddRefed<File> File::CreateFromFile(nsIGlobalObject* aGlobal,
nsIFile* aFile) {
MOZ_DIAGNOSTIC_ASSERT(XRE_IsParentProcess());
RefPtr<File> file = new File(aParent, new FileBlobImpl(aFile));
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<File> file = new File(aGlobal, new FileBlobImpl(aFile));
return file.forget();
}
/* static */
already_AddRefed<File> File::CreateFromFile(nsISupports* aParent,
already_AddRefed<File> File::CreateFromFile(nsIGlobalObject* aGlobal,
nsIFile* aFile,
const nsAString& aName,
const nsAString& aContentType) {
MOZ_DIAGNOSTIC_ASSERT(XRE_IsParentProcess());
MOZ_ASSERT(aGlobal);
if (NS_WARN_IF(!aGlobal)) {
return nullptr;
}
RefPtr<File> file =
new File(aParent, new FileBlobImpl(aFile, aName, aContentType));
new File(aGlobal, new FileBlobImpl(aFile, aName, aContentType));
return file.forget();
}
@ -130,7 +157,10 @@ already_AddRefed<File> File::Constructor(const GlobalObject& aGlobal,
impl->SetLastModified(aBag.mLastModified.Value());
}
RefPtr<File> file = new File(aGlobal.GetAsSupports(), impl);
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
MOZ_ASSERT(global);
RefPtr<File> file = new File(global, impl);
return file.forget();
}
@ -141,6 +171,12 @@ already_AddRefed<Promise> File::CreateFromNsIFile(
ErrorResult& aRv) {
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
MOZ_ASSERT(global);
if (NS_WARN_IF(!global)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
RefPtr<Promise> promise =
FileCreatorHelper::CreateFile(global, aData, aBag, true, aRv);
return promise.forget();
@ -159,6 +195,12 @@ already_AddRefed<Promise> File::CreateFromFileName(
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
MOZ_ASSERT(global);
if (NS_WARN_IF(!global)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
RefPtr<Promise> promise =
FileCreatorHelper::CreateFile(global, file, aBag, false, aRv);
return promise.forget();

View File

@ -22,9 +22,9 @@ class File final : public Blob {
public:
// Note: BlobImpl must be a File in order to use this method.
// Check impl->IsFile().
static File* Create(nsISupports* aParent, BlobImpl* aImpl);
static File* Create(nsIGlobalObject* aGlobal, BlobImpl* aImpl);
static already_AddRefed<File> Create(nsISupports* aParent,
static already_AddRefed<File> Create(nsIGlobalObject* aGlobal,
const nsAString& aName,
const nsAString& aContentType,
uint64_t aLength,
@ -33,7 +33,7 @@ class File final : public Blob {
// The returned File takes ownership of aMemoryBuffer. aMemoryBuffer will be
// freed by free so it must be allocated by malloc or something
// compatible with it.
static already_AddRefed<File> CreateMemoryFile(nsISupports* aParent,
static already_AddRefed<File> CreateMemoryFile(nsIGlobalObject* aGlobal,
void* aMemoryBuffer,
uint64_t aLength,
const nsAString& aName,
@ -46,10 +46,10 @@ class File final : public Blob {
// order to use nsIMIMEService.
// It would be nice to avoid using this method outside the
// main thread, to avoid extra runnables.
static already_AddRefed<File> CreateFromFile(nsISupports* aParent,
static already_AddRefed<File> CreateFromFile(nsIGlobalObject* aGlobal,
nsIFile* aFile);
static already_AddRefed<File> CreateFromFile(nsISupports* aParent,
static already_AddRefed<File> CreateFromFile(nsIGlobalObject* aGlobal,
nsIFile* aFile,
const nsAString& aName,
const nsAString& aContentType);
@ -95,7 +95,7 @@ class File final : public Blob {
private:
// File constructor should never be used directly. Use Blob::Create or
// File::Create.
File(nsISupports* aParent, BlobImpl* aImpl);
File(nsIGlobalObject* aGlobal, BlobImpl* aImpl);
~File();
};
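
CreateFromFile now also requires a non-null global, and the .cpp above diagnostic-asserts XRE_IsParentProcess(); per the header comment it is best kept on the main thread. A hypothetical parent-process caller:

// Sketch only; assumes mozilla/dom/File.h and a parent-process main thread.
already_AddRefed<mozilla::dom::File> MakeDOMFile(nsIGlobalObject* aGlobal,
                                                 nsIFile* aFile) {
  MOZ_ASSERT(NS_IsMainThread());
  RefPtr<mozilla::dom::File> file =
      mozilla::dom::File::CreateFromFile(aGlobal, aFile);
  if (NS_WARN_IF(!file)) {
    return nullptr;
  }
  return file.forget();
}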

View File

@ -29,25 +29,26 @@ namespace {
class BlobCreationDoneRunnable final : public Runnable {
public:
BlobCreationDoneRunnable(MutableBlobStorage* aBlobStorage,
MutableBlobStorageCallback* aCallback, Blob* aBlob,
nsresult aRv)
MutableBlobStorageCallback* aCallback,
BlobImpl* aBlobImpl, nsresult aRv)
: Runnable("dom::BlobCreationDoneRunnable"),
mBlobStorage(aBlobStorage),
mCallback(aCallback),
mBlob(aBlob),
mBlobImpl(aBlobImpl),
mRv(aRv) {
MOZ_ASSERT(aBlobStorage);
MOZ_ASSERT(aCallback);
MOZ_ASSERT((NS_FAILED(aRv) && !aBlob) || (NS_SUCCEEDED(aRv) && aBlob));
MOZ_ASSERT((NS_FAILED(aRv) && !aBlobImpl) ||
(NS_SUCCEEDED(aRv) && aBlobImpl));
}
NS_IMETHOD
Run() override {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mBlobStorage);
mCallback->BlobStoreCompleted(mBlobStorage, mBlob, mRv);
mCallback->BlobStoreCompleted(mBlobStorage, mBlobImpl, mRv);
mCallback = nullptr;
mBlob = nullptr;
mBlobImpl = nullptr;
return NS_OK;
}
@ -58,13 +59,11 @@ class BlobCreationDoneRunnable final : public Runnable {
// correct thread.
NS_ProxyRelease("BlobCreationDoneRunnable::mCallback",
mBlobStorage->EventTarget(), mCallback.forget());
NS_ProxyRelease("BlobCreationDoneRunnable::mBlob",
mBlobStorage->EventTarget(), mBlob.forget());
}
RefPtr<MutableBlobStorage> mBlobStorage;
RefPtr<MutableBlobStorageCallback> mCallback;
RefPtr<Blob> mBlob;
RefPtr<BlobImpl> mBlobImpl;
nsresult mRv;
};
@ -189,12 +188,10 @@ class CreateBlobRunnable final : public Runnable,
NS_DECL_ISUPPORTS_INHERITED
CreateBlobRunnable(MutableBlobStorage* aBlobStorage,
already_AddRefed<nsISupports> aParent,
const nsACString& aContentType,
already_AddRefed<MutableBlobStorageCallback> aCallback)
: Runnable("dom::CreateBlobRunnable"),
mBlobStorage(aBlobStorage),
mParent(aParent),
mContentType(aContentType),
mCallback(aCallback) {
MOZ_ASSERT(!NS_IsMainThread());
@ -210,11 +207,8 @@ class CreateBlobRunnable final : public Runnable,
}
void OperationSucceeded(BlobImpl* aBlobImpl) override {
nsCOMPtr<nsISupports> parent(std::move(mParent));
RefPtr<MutableBlobStorageCallback> callback(std::move(mCallback));
RefPtr<Blob> blob = Blob::Create(parent, aBlobImpl);
callback->BlobStoreCompleted(mBlobStorage, blob, NS_OK);
callback->BlobStoreCompleted(mBlobStorage, aBlobImpl, NS_OK);
}
void OperationFailed(nsresult aRv) override {
@ -227,14 +221,11 @@ class CreateBlobRunnable final : public Runnable,
MOZ_ASSERT(mBlobStorage);
// If something went wrong, we still have to release data in the correct
// thread.
NS_ProxyRelease("CreateBlobRunnable::mParent", mBlobStorage->EventTarget(),
mParent.forget());
NS_ProxyRelease("CreateBlobRunnable::mCallback",
mBlobStorage->EventTarget(), mCallback.forget());
}
RefPtr<MutableBlobStorage> mBlobStorage;
nsCOMPtr<nsISupports> mParent;
nsCString mContentType;
RefPtr<MutableBlobStorageCallback> mCallback;
};
@ -245,12 +236,10 @@ NS_IMPL_ISUPPORTS_INHERITED0(CreateBlobRunnable, Runnable)
// it dispatches a CreateBlobRunnable to the main-thread.
class LastRunnable final : public Runnable {
public:
LastRunnable(MutableBlobStorage* aBlobStorage, nsISupports* aParent,
const nsACString& aContentType,
LastRunnable(MutableBlobStorage* aBlobStorage, const nsACString& aContentType,
MutableBlobStorageCallback* aCallback)
: Runnable("dom::LastRunnable"),
mBlobStorage(aBlobStorage),
mParent(aParent),
mContentType(aContentType),
mCallback(aCallback) {
MOZ_ASSERT(NS_IsMainThread());
@ -262,8 +251,8 @@ class LastRunnable final : public Runnable {
Run() override {
MOZ_ASSERT(!NS_IsMainThread());
RefPtr<Runnable> runnable = new CreateBlobRunnable(
mBlobStorage, mParent.forget(), mContentType, mCallback.forget());
RefPtr<Runnable> runnable =
new CreateBlobRunnable(mBlobStorage, mContentType, mCallback.forget());
return mBlobStorage->EventTarget()->Dispatch(runnable, NS_DISPATCH_NORMAL);
}
@ -272,14 +261,11 @@ class LastRunnable final : public Runnable {
MOZ_ASSERT(mBlobStorage);
// If something went wrong, we still have to release data in the correct
// thread.
NS_ProxyRelease("LastRunnable::mParent", mBlobStorage->EventTarget(),
mParent.forget());
NS_ProxyRelease("LastRunnable::mCallback", mBlobStorage->EventTarget(),
mCallback.forget());
}
RefPtr<MutableBlobStorage> mBlobStorage;
nsCOMPtr<nsISupports> mParent;
nsCString mContentType;
RefPtr<MutableBlobStorageCallback> mCallback;
};
@ -330,9 +316,8 @@ MutableBlobStorage::~MutableBlobStorage() {
}
}
void MutableBlobStorage::GetBlobWhenReady(
nsISupports* aParent, const nsACString& aContentType,
MutableBlobStorageCallback* aCallback) {
void MutableBlobStorage::GetBlobImplWhenReady(
const nsACString& aContentType, MutableBlobStorageCallback* aCallback) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aCallback);
@ -359,8 +344,7 @@ void MutableBlobStorage::GetBlobWhenReady(
// this is to go to the I/O thread and then we come back: the runnables are
// executed in order and this LastRunnable will be... the last one.
// This Runnable will also close the FD on the I/O thread.
RefPtr<Runnable> runnable =
new LastRunnable(this, aParent, aContentType, aCallback);
RefPtr<Runnable> runnable = new LastRunnable(this, aContentType, aCallback);
// If the dispatching fails, we are shutting down and it's fine not to
// run the callback.
@ -370,7 +354,6 @@ void MutableBlobStorage::GetBlobWhenReady(
// If we are waiting for the temporary file, it's better to wait...
if (previousState == eWaitingForTemporaryFile) {
mPendingParent = aParent;
mPendingContentType = aContentType;
mPendingCallback = aCallback;
return;
@ -389,9 +372,8 @@ void MutableBlobStorage::GetBlobWhenReady(
blobImpl = new EmptyBlobImpl(NS_ConvertUTF8toUTF16(aContentType));
}
RefPtr<Blob> blob = Blob::Create(aParent, blobImpl);
RefPtr<BlobCreationDoneRunnable> runnable =
new BlobCreationDoneRunnable(this, aCallback, blob, NS_OK);
new BlobCreationDoneRunnable(this, aCallback, blobImpl, NS_OK);
nsresult error =
EventTarget()->Dispatch(runnable.forget(), NS_DISPATCH_NORMAL);
@ -589,19 +571,18 @@ void MutableBlobStorage::TemporaryFileCreated(PRFileDesc* aFD) {
return;
}
// If we are closed, it means that GetBlobWhenReady() has been called when we
// were already waiting for a temporary file-descriptor. Finally we are here,
// AdoptBuffer runnable is going to write the current buffer into this file.
// After that, there is nothing else to write, and we dispatch LastRunnable
// which ends up calling mPendingCallback via CreateBlobRunnable.
// If we are closed, it means that GetBlobImplWhenReady() has been called when
// we were already waiting for a temporary file-descriptor. Finally we are
// here, AdoptBuffer runnable is going to write the current buffer into this
// file. After that, there is nothing else to write, and we dispatch
// LastRunnable which ends up calling mPendingCallback via CreateBlobRunnable.
if (mStorageState == eClosed) {
MOZ_ASSERT(mPendingCallback);
RefPtr<Runnable> runnable = new LastRunnable(
this, mPendingParent, mPendingContentType, mPendingCallback);
RefPtr<Runnable> runnable =
new LastRunnable(this, mPendingContentType, mPendingCallback);
Unused << DispatchToIOThread(runnable.forget());
mPendingParent = nullptr;
mPendingCallback = nullptr;
}
}
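
With the parent pointer gone, storage consumers receive a BlobImpl and are responsible for binding it to a global themselves. A minimal sketch of a callback under the new signature, assuming only the interfaces shown above (the class name and its mGlobal member are hypothetical, and the refcounting boilerplate is omitted):

class BlobReadyCallback final : public MutableBlobStorageCallback {
 public:
  explicit BlobReadyCallback(nsIGlobalObject* aGlobal) : mGlobal(aGlobal) {}

  void BlobStoreCompleted(MutableBlobStorage* aBlobStorage,
                          BlobImpl* aBlobImpl, nsresult aRv) override {
    if (NS_FAILED(aRv) || !aBlobImpl) {
      return;  // Storage failed; there is nothing to expose.
    }
    // The BlobImpl carries no global; wrap it into a DOM Blob only here, on
    // the global this consumer owns.
    RefPtr<Blob> blob = Blob::Create(mGlobal, aBlobImpl);
    if (NS_WARN_IF(!blob)) {
      return;
    }
    // ... hand `blob` over to script ...
  }

 private:
  nsCOMPtr<nsIGlobalObject> mGlobal;
};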

View File

@ -29,8 +29,8 @@ class MutableBlobStorageCallback {
public:
NS_INLINE_DECL_PURE_VIRTUAL_REFCOUNTING
virtual void BlobStoreCompleted(MutableBlobStorage* aBlobStorage, Blob* aBlob,
nsresult aRv) = 0;
virtual void BlobStoreCompleted(MutableBlobStorage* aBlobStorage,
BlobImpl* aBlob, nsresult aRv) = 0;
};
// This class must be created and used on the main thread, except for Append()
@ -51,9 +51,9 @@ class MutableBlobStorage final {
nsresult Append(const void* aData, uint32_t aLength);
// This method can be called just once.
// The callback will be called when the Blob is ready.
void GetBlobWhenReady(nsISupports* aParent, const nsACString& aContentType,
MutableBlobStorageCallback* aCallback);
// The callback will be called when the BlobImpl is ready.
void GetBlobImplWhenReady(const nsACString& aContentType,
MutableBlobStorageCallback* aCallback);
void TemporaryFileCreated(PRFileDesc* aFD);
@ -115,7 +115,6 @@ class MutableBlobStorage final {
RefPtr<TaskQueue> mTaskQueue;
nsCOMPtr<nsIEventTarget> mEventTarget;
nsCOMPtr<nsISupports> mPendingParent;
nsCString mPendingContentType;
RefPtr<MutableBlobStorageCallback> mPendingCallback;

View File

@ -12,10 +12,9 @@ namespace dom {
MutableBlobStreamListener::MutableBlobStreamListener(
MutableBlobStorage::MutableBlobStorageType aStorageType,
nsISupports* aParent, const nsACString& aContentType,
MutableBlobStorageCallback* aCallback, nsIEventTarget* aEventTarget)
const nsACString& aContentType, MutableBlobStorageCallback* aCallback,
nsIEventTarget* aEventTarget)
: mCallback(aCallback),
mParent(aParent),
mStorageType(aStorageType),
mContentType(aContentType),
mEventTarget(aEventTarget) {
@ -62,7 +61,7 @@ MutableBlobStreamListener::OnStopRequest(nsIRequest* aRequest,
return NS_OK;
}
storage->GetBlobWhenReady(mParent, mContentType, mCallback);
storage->GetBlobImplWhenReady(mContentType, mCallback);
return NS_OK;
}

View File

@ -24,7 +24,6 @@ class MutableBlobStreamListener final
NS_DECL_NSIREQUESTOBSERVER
MutableBlobStreamListener(MutableBlobStorage::MutableBlobStorageType aType,
nsISupports* aParent,
const nsACString& aContentType,
MutableBlobStorageCallback* aCallback,
nsIEventTarget* aEventTarget = nullptr);
@ -39,7 +38,6 @@ class MutableBlobStreamListener final
RefPtr<MutableBlobStorage> mStorage;
RefPtr<MutableBlobStorageCallback> mCallback;
nsCOMPtr<nsISupports> mParent;
MutableBlobStorage::MutableBlobStorageType mStorageType;
nsCString mContentType;
nsCOMPtr<nsIEventTarget> mEventTarget;

View File

@ -38,6 +38,11 @@ mozilla::ipc::IPCResult FileCreatorChild::Recv__delete__(
aResult.get_FileCreationSuccessResult().blob());
RefPtr<File> file = File::Create(promise->GetParentObject(), impl);
if (NS_WARN_IF(!file)) {
promise->MaybeReject(NS_ERROR_FAILURE);
return IPC_OK();
}
promise->MaybeResolve(file);
return IPC_OK();
}

View File

@ -12,6 +12,7 @@ EXPORTS.mozilla.dom += [
'Blob.h',
'BlobImpl.h',
'BlobSet.h',
'EmptyBlobImpl.h',
'File.h',
'FileBlobImpl.h',
'FileCreatorHelper.h',

View File

@ -25,7 +25,7 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(Directory)
tmp->mFileSystem->Unlink();
tmp->mFileSystem = nullptr;
}
NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mGlobal)
NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
@ -33,7 +33,7 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(Directory)
if (tmp->mFileSystem) {
tmp->mFileSystem->Traverse(cb);
}
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mGlobal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(Directory)
@ -55,29 +55,35 @@ already_AddRefed<Directory> Directory::Constructor(const GlobalObject& aGlobal,
return nullptr;
}
return Create(aGlobal.GetAsSupports(), path);
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
if (NS_WARN_IF(!global)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
return Create(global, path);
}
/* static */
already_AddRefed<Directory> Directory::Create(nsISupports* aParent,
already_AddRefed<Directory> Directory::Create(nsIGlobalObject* aGlobal,
nsIFile* aFile,
FileSystemBase* aFileSystem) {
MOZ_ASSERT(aParent);
MOZ_ASSERT(aGlobal);
MOZ_ASSERT(aFile);
RefPtr<Directory> directory = new Directory(aParent, aFile, aFileSystem);
RefPtr<Directory> directory = new Directory(aGlobal, aFile, aFileSystem);
return directory.forget();
}
Directory::Directory(nsISupports* aParent, nsIFile* aFile,
Directory::Directory(nsIGlobalObject* aGlobal, nsIFile* aFile,
FileSystemBase* aFileSystem)
: mParent(aParent), mFile(aFile) {
: mGlobal(aGlobal), mFile(aFile) {
MOZ_ASSERT(aFile);
// aFileSystem can be null. In this case we create an OSFileSystem when needed.
if (aFileSystem) {
// More likely, this is an OSFileSystem. This object keeps a reference to
// mParent but it's not cycle collectable and to avoid manual
// mGlobal but it's not cycle collectable and to avoid manual
// addref/release, it's better to have one object per directory. For this
// reason we clone it here.
mFileSystem = aFileSystem->Clone();
@ -86,7 +92,7 @@ Directory::Directory(nsISupports* aParent, nsIFile* aFile,
Directory::~Directory() {}
nsISupports* Directory::GetParentObject() const { return mParent; }
nsIGlobalObject* Directory::GetParentObject() const { return mGlobal; }
JSObject* Directory::WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) {
@ -181,7 +187,7 @@ FileSystemBase* Directory::GetFileSystem(ErrorResult& aRv) {
}
RefPtr<OSFileSystem> fs = new OSFileSystem(path);
fs->Init(mParent);
fs->Init(mGlobal);
mFileSystem = fs;
}
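
Call sites that used to pass a bare nsISupports parent now need an explicit nsIGlobalObject. A hedged sketch of the new calling pattern, mirroring the window-based call sites later in this commit (aWindow is an assumed nsPIDOMWindowInner* and aDirectoryPath an assumed path string):

nsCOMPtr<nsIFile> file;
nsresult rv = NS_NewLocalFile(aDirectoryPath, true, getter_AddRefs(file));
if (NS_WARN_IF(NS_FAILED(rv))) {
  return;
}
// nsPIDOMWindowInner can hand out its global directly via AsGlobal().
RefPtr<Directory> directory = Directory::Create(aWindow->AsGlobal(), file);
MOZ_ASSERT(directory);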

View File

@ -28,13 +28,13 @@ class Directory final : public nsISupports, public nsWrapperCache {
const nsAString& aRealPath,
ErrorResult& aRv);
static already_AddRefed<Directory> Create(nsISupports* aParent,
static already_AddRefed<Directory> Create(nsIGlobalObject* aGlobal,
nsIFile* aDirectory,
FileSystemBase* aFileSystem = 0);
// ========= Begin WebIDL bindings. ===========
nsISupports* GetParentObject() const;
nsIGlobalObject* GetParentObject() const;
virtual JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
@ -84,7 +84,7 @@ class Directory final : public nsISupports, public nsWrapperCache {
nsIFile* GetInternalNsIFile() const { return mFile; }
private:
Directory(nsISupports* aParent, nsIFile* aFile,
Directory(nsIGlobalObject* aGlobal, nsIFile* aFile,
FileSystemBase* aFileSystem = nullptr);
~Directory();
@ -93,7 +93,7 @@ class Directory final : public nsISupports, public nsWrapperCache {
*/
nsresult DOMPathToRealPath(const nsAString& aPath, nsIFile** aFile) const;
nsCOMPtr<nsISupports> mParent;
nsCOMPtr<nsIGlobalObject> mGlobal;
RefPtr<FileSystemBase> mFileSystem;
nsCOMPtr<nsIFile> mFile;

View File

@ -19,7 +19,7 @@ void FileSystemBase::Shutdown() {
mShutdown = true;
}
nsISupports* FileSystemBase::GetParentObject() const {
nsIGlobalObject* FileSystemBase::GetParentObject() const {
AssertIsOnOwningThread();
return nullptr;
}

View File

@ -28,7 +28,7 @@ class FileSystemBase {
virtual bool ShouldCreateDirectory() = 0;
virtual nsISupports* GetParentObject() const;
virtual nsIGlobalObject* GetParentObject() const;
virtual void GetDirectoryName(nsIFile* aFile, nsAString& aRetval,
ErrorResult& aRv) const;

View File

@ -34,12 +34,8 @@ GetDirectoryListingTaskChild::Create(FileSystemBase* aFileSystem,
MOZ_ASSERT(aDirectory);
aFileSystem->AssertIsOnOwningThread();
nsCOMPtr<nsIGlobalObject> globalObject =
do_QueryInterface(aFileSystem->GetParentObject());
if (NS_WARN_IF(!globalObject)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
nsCOMPtr<nsIGlobalObject> globalObject = aFileSystem->GetParentObject();
MOZ_ASSERT(globalObject);
RefPtr<GetDirectoryListingTaskChild> task = new GetDirectoryListingTaskChild(
globalObject, aFileSystem, aDirectory, aTargetPath, aFilters);
@ -120,8 +116,10 @@ void GetDirectoryListingTaskChild::SetSuccessRequestResult(
RefPtr<BlobImpl> blobImpl = IPCBlobUtils::Deserialize(d.blob());
MOZ_ASSERT(blobImpl);
RefPtr<File> file =
File::Create(mFileSystem->GetParentObject(), blobImpl);
nsCOMPtr<nsIGlobalObject> globalObject = mFileSystem->GetParentObject();
MOZ_ASSERT(globalObject);
RefPtr<File> file = File::Create(globalObject, blobImpl);
MOZ_ASSERT(file);
ofd->SetAsFile() = file;

View File

@ -28,8 +28,7 @@ GetFileOrDirectoryTaskChild::Create(FileSystemBase* aFileSystem,
MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread!");
MOZ_ASSERT(aFileSystem);
nsCOMPtr<nsIGlobalObject> globalObject =
do_QueryInterface(aFileSystem->GetParentObject());
nsCOMPtr<nsIGlobalObject> globalObject = aFileSystem->GetParentObject();
if (NS_WARN_IF(!globalObject)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
@ -89,8 +88,13 @@ void GetFileOrDirectoryTaskChild::SetSuccessRequestResult(
RefPtr<BlobImpl> blobImpl = IPCBlobUtils::Deserialize(r.blob());
MOZ_ASSERT(blobImpl);
mResultFile = File::Create(mFileSystem->GetParentObject(), blobImpl);
MOZ_ASSERT(mResultFile);
nsCOMPtr<nsIGlobalObject> globalObject = mFileSystem->GetParentObject();
MOZ_ASSERT(globalObject);
mResultFile = File::Create(globalObject, blobImpl);
if (NS_WARN_IF(!mResultFile)) {
aRv.Throw(NS_ERROR_FAILURE);
}
break;
}
case FileSystemResponseValue::TFileSystemDirectoryResponse: {

View File

@ -22,16 +22,13 @@ class ReleaseRunnable final : public Runnable {
public:
static void MaybeReleaseOnMainThread(
nsTArray<RefPtr<Promise>>& aPromises,
nsTArray<RefPtr<GetFilesCallback>>& aCallbacks,
Sequence<RefPtr<File>>& aFiles,
already_AddRefed<nsIGlobalObject> aGlobal) {
nsCOMPtr<nsIGlobalObject> global(aGlobal);
nsTArray<RefPtr<GetFilesCallback>>& aCallbacks) {
if (NS_IsMainThread()) {
return;
}
RefPtr<ReleaseRunnable> runnable =
new ReleaseRunnable(aPromises, aCallbacks, aFiles, global.forget());
new ReleaseRunnable(aPromises, aCallbacks);
FileSystemUtils::DispatchRunnable(nullptr, runnable.forget());
}
@ -41,28 +38,20 @@ class ReleaseRunnable final : public Runnable {
mPromises.Clear();
mCallbacks.Clear();
mFiles.Clear();
mGlobal = nullptr;
return NS_OK;
}
private:
ReleaseRunnable(nsTArray<RefPtr<Promise>>& aPromises,
nsTArray<RefPtr<GetFilesCallback>>& aCallbacks,
Sequence<RefPtr<File>>& aFiles,
already_AddRefed<nsIGlobalObject> aGlobal)
nsTArray<RefPtr<GetFilesCallback>>& aCallbacks)
: Runnable("dom::ReleaseRunnable") {
mPromises.SwapElements(aPromises);
mCallbacks.SwapElements(aCallbacks);
mFiles.SwapElements(aFiles);
mGlobal = aGlobal;
}
nsTArray<RefPtr<Promise>> mPromises;
nsTArray<RefPtr<GetFilesCallback>> mCallbacks;
Sequence<RefPtr<File>> mFiles;
nsCOMPtr<nsIGlobalObject> mGlobal;
};
} // namespace
@ -71,15 +60,14 @@ class ReleaseRunnable final : public Runnable {
// GetFilesHelper Base class
already_AddRefed<GetFilesHelper> GetFilesHelper::Create(
nsIGlobalObject* aGlobal,
const nsTArray<OwningFileOrDirectory>& aFilesOrDirectory,
bool aRecursiveFlag, ErrorResult& aRv) {
RefPtr<GetFilesHelper> helper;
if (XRE_IsParentProcess()) {
helper = new GetFilesHelper(aGlobal, aRecursiveFlag);
helper = new GetFilesHelper(aRecursiveFlag);
} else {
helper = new GetFilesHelperChild(aGlobal, aRecursiveFlag);
helper = new GetFilesHelperChild(aRecursiveFlag);
}
nsAutoString directoryPath;
@ -87,7 +75,8 @@ already_AddRefed<GetFilesHelper> GetFilesHelper::Create(
for (uint32_t i = 0; i < aFilesOrDirectory.Length(); ++i) {
const OwningFileOrDirectory& data = aFilesOrDirectory[i];
if (data.IsFile()) {
if (!helper->mFiles.AppendElement(data.GetAsFile(), fallible)) {
if (!helper->mTargetBlobImplArray.AppendElement(data.GetAsFile()->Impl(),
fallible)) {
aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
return nullptr;
}
@ -115,7 +104,7 @@ already_AddRefed<GetFilesHelper> GetFilesHelper::Create(
return helper.forget();
}
MOZ_ASSERT(helper->mFiles.IsEmpty());
MOZ_ASSERT(helper->mTargetBlobImplArray.IsEmpty());
helper->SetDirectoryPath(directoryPath);
helper->Work(aRv);
@ -126,18 +115,16 @@ already_AddRefed<GetFilesHelper> GetFilesHelper::Create(
return helper.forget();
}
GetFilesHelper::GetFilesHelper(nsIGlobalObject* aGlobal, bool aRecursiveFlag)
GetFilesHelper::GetFilesHelper(bool aRecursiveFlag)
: Runnable("GetFilesHelper"),
GetFilesHelperBase(aRecursiveFlag),
mGlobal(aGlobal),
mListingCompleted(false),
mErrorResult(NS_OK),
mMutex("GetFilesHelper::mMutex"),
mCanceled(false) {}
GetFilesHelper::~GetFilesHelper() {
ReleaseRunnable::MaybeReleaseOnMainThread(mPromises, mCallbacks, mFiles,
mGlobal.forget());
ReleaseRunnable::MaybeReleaseOnMainThread(mPromises, mCallbacks);
}
void GetFilesHelper::AddPromise(Promise* aPromise) {
@ -167,8 +154,6 @@ void GetFilesHelper::AddCallback(GetFilesCallback* aCallback) {
}
void GetFilesHelper::Unlink() {
mGlobal = nullptr;
mFiles.Clear();
mPromises.Clear();
mCallbacks.Clear();
@ -182,8 +167,6 @@ void GetFilesHelper::Unlink() {
void GetFilesHelper::Traverse(nsCycleCollectionTraversalCallback& cb) {
GetFilesHelper* tmp = this;
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mGlobal);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mFiles);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPromises);
}
@ -221,8 +204,6 @@ GetFilesHelper::Run() {
return NS_OK;
}
RunMainThread();
OperationCompleted();
return NS_OK;
}
@ -272,29 +253,6 @@ void GetFilesHelper::RunIO() {
mErrorResult = ExploreDirectory(domPath, file);
}
void GetFilesHelper::RunMainThread() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mDirectoryPath.IsEmpty());
MOZ_ASSERT(!mListingCompleted);
// If there is an error, do nothing.
if (NS_FAILED(mErrorResult)) {
return;
}
// Create the sequence of Files.
for (uint32_t i = 0; i < mTargetBlobImplArray.Length(); ++i) {
RefPtr<File> domFile = File::Create(mGlobal, mTargetBlobImplArray[i]);
MOZ_ASSERT(domFile);
if (!mFiles.AppendElement(domFile, fallible)) {
mErrorResult = NS_ERROR_OUT_OF_MEMORY;
mFiles.Clear();
return;
}
}
}
nsresult GetFilesHelperBase::ExploreDirectory(const nsAString& aDOMPath,
nsIFile* aFile) {
MOZ_ASSERT(!NS_IsMainThread());
@ -439,13 +397,33 @@ void GetFilesHelper::ResolveOrRejectPromise(Promise* aPromise) {
MOZ_ASSERT(mListingCompleted);
MOZ_ASSERT(aPromise);
Sequence<RefPtr<File>> files;
if (NS_SUCCEEDED(mErrorResult)) {
for (uint32_t i = 0; i < mTargetBlobImplArray.Length(); ++i) {
RefPtr<File> domFile =
File::Create(aPromise->GetParentObject(), mTargetBlobImplArray[i]);
if (NS_WARN_IF(!domFile)) {
mErrorResult = NS_ERROR_FAILURE;
files.Clear();
break;
}
if (!files.AppendElement(domFile, fallible)) {
mErrorResult = NS_ERROR_OUT_OF_MEMORY;
files.Clear();
break;
}
}
}
// Error propagation.
if (NS_FAILED(mErrorResult)) {
aPromise->MaybeReject(mErrorResult);
return;
}
aPromise->MaybeResolve(mFiles);
aPromise->MaybeResolve(files);
}
void GetFilesHelper::RunCallback(GetFilesCallback* aCallback) {
@ -453,7 +431,7 @@ void GetFilesHelper::RunCallback(GetFilesCallback* aCallback) {
MOZ_ASSERT(mListingCompleted);
MOZ_ASSERT(aCallback);
aCallback->Callback(mErrorResult, mFiles);
aCallback->Callback(mErrorResult, mTargetBlobImplArray);
}
///////////////////////////////////////////////////////////////////////////////
@ -494,10 +472,7 @@ bool GetFilesHelperChild::AppendBlobImpl(BlobImpl* aBlobImpl) {
MOZ_ASSERT(aBlobImpl);
MOZ_ASSERT(aBlobImpl->IsFile());
RefPtr<File> file = File::Create(mGlobal, aBlobImpl);
MOZ_ASSERT(file);
return mFiles.AppendElement(file, fallible);
return mTargetBlobImplArray.AppendElement(aBlobImpl, fallible);
}
void GetFilesHelperChild::Finished(nsresult aError) {
@ -521,7 +496,7 @@ class GetFilesHelperParentCallback final : public GetFilesCallback {
}
void Callback(nsresult aStatus,
const Sequence<RefPtr<File>>& aFiles) override {
const FallibleTArray<RefPtr<BlobImpl>>& aBlobImpls) override {
if (NS_FAILED(aStatus)) {
mParent->mContentParent->SendGetFilesResponseAndForget(
mParent->mUUID, GetFilesResponseFailure(aStatus));
@ -531,11 +506,11 @@ class GetFilesHelperParentCallback final : public GetFilesCallback {
GetFilesResponseSuccess success;
nsTArray<IPCBlob>& ipcBlobs = success.blobs();
ipcBlobs.SetLength(aFiles.Length());
ipcBlobs.SetLength(aBlobImpls.Length());
for (uint32_t i = 0; i < aFiles.Length(); ++i) {
for (uint32_t i = 0; i < aBlobImpls.Length(); ++i) {
nsresult rv = IPCBlobUtils::Serialize(
aFiles[i]->Impl(), mParent->mContentParent, ipcBlobs[i]);
aBlobImpls[i], mParent->mContentParent, ipcBlobs[i]);
if (NS_WARN_IF(NS_FAILED(rv))) {
mParent->mContentParent->SendGetFilesResponseAndForget(
mParent->mUUID, GetFilesResponseFailure(NS_ERROR_OUT_OF_MEMORY));
@ -555,7 +530,7 @@ class GetFilesHelperParentCallback final : public GetFilesCallback {
GetFilesHelperParent::GetFilesHelperParent(const nsID& aUUID,
ContentParent* aContentParent,
bool aRecursiveFlag)
: GetFilesHelper(nullptr, aRecursiveFlag),
: GetFilesHelper(aRecursiveFlag),
mContentParent(aContentParent),
mUUID(aUUID) {}

View File

@ -28,7 +28,7 @@ class GetFilesCallback {
NS_INLINE_DECL_REFCOUNTING(GetFilesCallback);
virtual void Callback(nsresult aStatus,
const Sequence<RefPtr<File>>& aFiles) = 0;
const FallibleTArray<RefPtr<BlobImpl>>& aBlobImpls) = 0;
protected:
virtual ~GetFilesCallback() {}
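
Implementations of GetFilesCallback now receive thread-agnostic BlobImpls and must materialize DOM File objects themselves, on a global of their choosing. A minimal sketch of a conforming subclass (the class name and the mGlobal member are hypothetical):

class LogFilesCallback final : public GetFilesCallback {
 public:
  explicit LogFilesCallback(nsIGlobalObject* aGlobal) : mGlobal(aGlobal) {}

  void Callback(nsresult aStatus,
                const FallibleTArray<RefPtr<BlobImpl>>& aBlobImpls) override {
    if (NS_FAILED(aStatus)) {
      return;
    }
    for (const RefPtr<BlobImpl>& blobImpl : aBlobImpls) {
      // Bind each BlobImpl to a DOM File only at the point of use.
      RefPtr<File> file = File::Create(mGlobal, blobImpl);
      if (NS_WARN_IF(!file)) {
        return;
      }
      // ... use `file` ...
    }
  }

 private:
  nsCOMPtr<nsIGlobalObject> mGlobal;
};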
@ -63,7 +63,6 @@ class GetFilesHelper : public Runnable, public GetFilesHelperBase {
public:
static already_AddRefed<GetFilesHelper> Create(
nsIGlobalObject* aGlobal,
const nsTArray<OwningFileOrDirectory>& aFilesOrDirectory,
bool aRecursiveFlag, ErrorResult& aRv);
@ -76,7 +75,7 @@ class GetFilesHelper : public Runnable, public GetFilesHelperBase {
void Traverse(nsCycleCollectionTraversalCallback& cb);
protected:
GetFilesHelper(nsIGlobalObject* aGlobal, bool aRecursiveFlag);
explicit GetFilesHelper(bool aRecursiveFlag);
virtual ~GetFilesHelper();
@ -98,22 +97,15 @@ class GetFilesHelper : public Runnable, public GetFilesHelperBase {
void RunIO();
void RunMainThread();
void OperationCompleted();
void ResolveOrRejectPromise(Promise* aPromise);
void RunCallback(GetFilesCallback* aCallback);
nsCOMPtr<nsIGlobalObject> mGlobal;
bool mListingCompleted;
nsString mDirectoryPath;
// This is the real File sequence that we expose via Promises.
Sequence<RefPtr<File>> mFiles;
// Error code to propagate.
nsresult mErrorResult;
@ -128,8 +120,8 @@ class GetFilesHelper : public Runnable, public GetFilesHelperBase {
class GetFilesHelperChild final : public GetFilesHelper {
public:
GetFilesHelperChild(nsIGlobalObject* aGlobal, bool aRecursiveFlag)
: GetFilesHelper(aGlobal, aRecursiveFlag), mPendingOperation(false) {}
explicit GetFilesHelperChild(bool aRecursiveFlag)
: GetFilesHelper(aRecursiveFlag), mPendingOperation(false) {}
virtual void Work(ErrorResult& aRv) override;

View File

@ -30,8 +30,7 @@ already_AddRefed<GetFilesTaskChild> GetFilesTaskChild::Create(
MOZ_ASSERT(aDirectory);
aFileSystem->AssertIsOnOwningThread();
nsCOMPtr<nsIGlobalObject> globalObject =
do_QueryInterface(aFileSystem->GetParentObject());
nsCOMPtr<nsIGlobalObject> globalObject = aFileSystem->GetParentObject();
if (NS_WARN_IF(!globalObject)) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
@ -105,12 +104,19 @@ void GetFilesTaskChild::SetSuccessRequestResult(
return;
}
nsCOMPtr<nsIGlobalObject> globalObject = mFileSystem->GetParentObject();
MOZ_ASSERT(globalObject);
for (uint32_t i = 0; i < r.data().Length(); ++i) {
const FileSystemFileResponse& data = r.data()[i];
RefPtr<BlobImpl> blobImpl = IPCBlobUtils::Deserialize(data.blob());
MOZ_ASSERT(blobImpl);
mTargetData[i] = File::Create(mFileSystem->GetParentObject(), blobImpl);
mTargetData[i] = File::Create(globalObject, blobImpl);
if (NS_WARN_IF(!mTargetData[i])) {
aRv.Throw(NS_ERROR_FAILURE);
return;
}
}
}

View File

@ -23,29 +23,24 @@ already_AddRefed<FileSystemBase> OSFileSystem::Clone() {
AssertIsOnOwningThread();
RefPtr<OSFileSystem> fs = new OSFileSystem(mLocalRootPath);
if (mParent) {
fs->Init(mParent);
if (mGlobal) {
fs->Init(mGlobal);
}
return fs.forget();
}
void OSFileSystem::Init(nsISupports* aParent) {
void OSFileSystem::Init(nsIGlobalObject* aGlobal) {
AssertIsOnOwningThread();
MOZ_ASSERT(!mParent, "No double Init() calls");
MOZ_ASSERT(aParent);
MOZ_ASSERT(!mGlobal, "No double Init() calls");
MOZ_ASSERT(aGlobal);
mParent = aParent;
#ifdef DEBUG
nsCOMPtr<nsIGlobalObject> obj = do_QueryInterface(aParent);
MOZ_ASSERT(obj);
#endif
mGlobal = aGlobal;
}
nsISupports* OSFileSystem::GetParentObject() const {
nsIGlobalObject* OSFileSystem::GetParentObject() const {
AssertIsOnOwningThread();
return mParent;
return mGlobal;
}
bool OSFileSystem::IsSafeFile(nsIFile* aFile) const {
@ -66,14 +61,14 @@ bool OSFileSystem::IsSafeDirectory(Directory* aDir) const {
void OSFileSystem::Unlink() {
AssertIsOnOwningThread();
mParent = nullptr;
mGlobal = nullptr;
}
void OSFileSystem::Traverse(nsCycleCollectionTraversalCallback& cb) {
AssertIsOnOwningThread();
OSFileSystem* tmp = this;
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mGlobal);
}
void OSFileSystem::SerializeDOMPath(nsAString& aOutput) const {

View File

@ -14,7 +14,7 @@ class OSFileSystem final : public FileSystemBase {
public:
explicit OSFileSystem(const nsAString& aRootDir);
void Init(nsISupports* aParent);
void Init(nsIGlobalObject* aGlobal);
// Overrides FileSystemBase
@ -27,7 +27,7 @@ class OSFileSystem final : public FileSystemBase {
return false;
}
virtual nsISupports* GetParentObject() const override;
virtual nsIGlobalObject* GetParentObject() const override;
virtual bool IsSafeFile(nsIFile* aFile) const override;
@ -42,7 +42,7 @@ class OSFileSystem final : public FileSystemBase {
private:
virtual ~OSFileSystem() {}
nsCOMPtr<nsISupports> mParent;
nsCOMPtr<nsIGlobalObject> mGlobal;
};
class OSFileSystemParent final : public FileSystemBase {
@ -58,7 +58,7 @@ class OSFileSystemParent final : public FileSystemBase {
virtual bool ShouldCreateDirectory() override { return false; }
virtual nsISupports* GetParentObject() const override {
virtual nsIGlobalObject* GetParentObject() const override {
MOZ_CRASH("This should not be called on the PBackground thread.");
return nullptr;
}

View File

@ -28,8 +28,6 @@ class FileCallbackRunnable final : public Runnable {
// Here we clone the File object.
RefPtr<File> file = File::Create(mFile->GetParentObject(), mFile->Impl());
MOZ_ASSERT(file);
mCallback->Call(*file);
return NS_OK;
}

View File

@ -879,8 +879,11 @@ nsresult HTMLCanvasElement::MozGetAsFileImpl(const nsAString& aName,
do_QueryInterface(OwnerDoc()->GetScopeObject());
// The File takes ownership of the buffer
RefPtr<File> file =
File::CreateMemoryFile(win, imgData, imgSize, aName, type, PR_Now());
RefPtr<File> file = File::CreateMemoryFile(win->AsGlobal(), imgData, imgSize,
aName, type, PR_Now());
if (NS_WARN_IF(!file)) {
return NS_ERROR_FAILURE;
}
file.forget(aResult);
return NS_OK;

View File

@ -227,12 +227,23 @@ class DispatchChangeEventCallback final : public GetFilesCallback {
MOZ_ASSERT(aInputElement);
}
virtual void Callback(nsresult aStatus,
const Sequence<RefPtr<File>>& aFiles) override {
virtual void Callback(
nsresult aStatus,
const FallibleTArray<RefPtr<BlobImpl>>& aBlobImpls) override {
if (!mInputElement->GetOwnerGlobal()) {
return;
}
nsTArray<OwningFileOrDirectory> array;
for (uint32_t i = 0; i < aFiles.Length(); ++i) {
for (uint32_t i = 0; i < aBlobImpls.Length(); ++i) {
OwningFileOrDirectory* element = array.AppendElement();
element->SetAsFile() = aFiles[i];
RefPtr<File> file =
File::Create(mInputElement->GetOwnerGlobal(), aBlobImpls[i]);
if (NS_WARN_IF(!file)) {
return;
}
element->SetAsFile() = file;
}
mInputElement->SetFilesOrDirectories(array, true);
@ -522,6 +533,11 @@ HTMLInputElement::nsFilePickerShownCallback::Done(int16_t aResult) {
// So, we can safely send one ourselves.
mInput->SetFilesOrDirectories(newFilesOrDirectories, true);
// If mInput (the HTMLInputElement) has no owner global, don't create a
// DispatchChangeEventCallback.
if (!mInput->GetOwnerGlobal()) {
return NS_OK;
}
RefPtr<DispatchChangeEventCallback> dispatchChangeEventCallback =
new DispatchChangeEventCallback(mInput);
@ -1989,7 +2005,9 @@ void HTMLInputElement::MozSetFileArray(
nsTArray<OwningFileOrDirectory> files;
for (uint32_t i = 0; i < aFiles.Length(); ++i) {
RefPtr<File> file = File::Create(global, aFiles[i].get()->Impl());
MOZ_ASSERT(file);
if (NS_WARN_IF(!file)) {
return;
}
OwningFileOrDirectory* element = files.AppendElement();
element->SetAsFile() = file;
@ -2037,6 +2055,10 @@ void HTMLInputElement::MozSetFileNameArray(const Sequence<nsString>& aFileNames,
}
RefPtr<File> domFile = File::CreateFromFile(global, file);
if (NS_WARN_IF(!domFile)) {
aRv.Throw(NS_ERROR_FAILURE);
return;
}
OwningFileOrDirectory* element = files.AppendElement();
element->SetAsFile() = domFile;
@ -2063,7 +2085,7 @@ void HTMLInputElement::MozSetDirectory(const nsAString& aDirectoryPath,
return;
}
RefPtr<Directory> directory = Directory::Create(window, file);
RefPtr<Directory> directory = Directory::Create(window->AsGlobal(), file);
MOZ_ASSERT(directory);
nsTArray<OwningFileOrDirectory> array;
@ -6059,8 +6081,10 @@ static nsTArray<OwningFileOrDirectory> RestoreFileContentData(
continue;
}
RefPtr<File> file = File::Create(aWindow, it.get_BlobImpl());
MOZ_ASSERT(file);
RefPtr<File> file = File::Create(aWindow->AsGlobal(), it.get_BlobImpl());
if (NS_WARN_IF(!file)) {
continue;
}
OwningFileOrDirectory* element = res.AppendElement();
element->SetAsFile() = file;
@ -6073,7 +6097,8 @@ static nsTArray<OwningFileOrDirectory> RestoreFileContentData(
continue;
}
RefPtr<Directory> directory = Directory::Create(aWindow, file);
RefPtr<Directory> directory =
Directory::Create(aWindow->AsGlobal(), file);
MOZ_ASSERT(directory);
OwningFileOrDirectory* element = res.AppendElement();
@ -7074,17 +7099,10 @@ GetFilesHelper* HTMLInputElement::GetOrCreateGetFilesHelper(bool aRecursiveFlag,
ErrorResult& aRv) {
MOZ_ASSERT(mFileData);
nsCOMPtr<nsIGlobalObject> global = OwnerDoc()->GetScopeObject();
MOZ_ASSERT(global);
if (!global) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
if (aRecursiveFlag) {
if (!mFileData->mGetFilesRecursiveHelper) {
mFileData->mGetFilesRecursiveHelper = GetFilesHelper::Create(
global, GetFilesOrDirectoriesInternal(), aRecursiveFlag, aRv);
GetFilesOrDirectoriesInternal(), aRecursiveFlag, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
@ -7095,7 +7113,7 @@ GetFilesHelper* HTMLInputElement::GetOrCreateGetFilesHelper(bool aRecursiveFlag,
if (!mFileData->mGetFilesNonRecursiveHelper) {
mFileData->mGetFilesNonRecursiveHelper = GetFilesHelper::Create(
global, GetFilesOrDirectoriesInternal(), aRecursiveFlag, aRv);
GetFilesOrDirectoriesInternal(), aRecursiveFlag, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}

View File

@ -4995,6 +4995,14 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder) {
// This will also do an AddRemoveSelfReference.
NotifyOwnerDocumentActivityChanged();
if (!mDecoder) {
// NotifyOwnerDocumentActivityChanged may shut down the decoder if the
// owning document is inactive and we're in the EME case. We could try to
// handle this, but at the time of writing it's a pretty niche case, so just
// bail.
return NS_ERROR_FAILURE;
}
if (mPausedForInactiveDocumentOrChannel) {
mDecoder->Suspend();
}

View File

@ -589,6 +589,7 @@ auto DeserializeStructuredCloneFiles(
RefPtr<Blob> blob =
Blob::Create(aDatabase->GetOwnerGlobal(), blobImpl);
MOZ_ASSERT(blob);
files.EmplaceBack(StructuredCloneFile::eStructuredClone,
std::move(blob));

View File

@ -623,6 +623,27 @@ class ValueDeserializationHelper {
RefPtr<Blob> blob = aFile.mBlob;
// It can happen that this IDB is used from chrome code, where there is no
// window; we still want to pick a correct global for the new File object.
nsCOMPtr<nsIGlobalObject> global;
if (NS_IsMainThread()) {
if (aDatabase && aDatabase->GetParentObject()) {
global = aDatabase->GetParentObject();
} else {
global = xpc::CurrentNativeGlobal(aCx);
}
} else {
WorkerPrivate* workerPrivate = GetCurrentThreadWorkerPrivate();
MOZ_ASSERT(workerPrivate);
WorkerGlobalScope* globalScope = workerPrivate->GlobalScope();
MOZ_ASSERT(globalScope);
global = do_QueryObject(globalScope);
}
MOZ_ASSERT(global);
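
The main-thread/worker global selection above is a generally useful pattern wherever a DOM object has to be created from code that can run on either kind of thread. A condensed sketch of the same logic as a free function (PickGlobal is a hypothetical name):

static already_AddRefed<nsIGlobalObject> PickGlobal(JSContext* aCx) {
  nsCOMPtr<nsIGlobalObject> global;
  if (NS_IsMainThread()) {
    // On the main thread, the current native global is a safe fallback.
    global = xpc::CurrentNativeGlobal(aCx);
  } else {
    // On a worker thread, use the worker's global scope.
    WorkerPrivate* workerPrivate = GetCurrentThreadWorkerPrivate();
    MOZ_ASSERT(workerPrivate);
    global = do_QueryObject(workerPrivate->GlobalScope());
  }
  return global.forget();
}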
/* If we are creating an index, we do not have an mBlob but do have an
* mInfo. Unlike other index or upgrade cases, we do need a real-looking
* Blob/File instance because the index's key path can reference their
@ -638,30 +659,12 @@ class ValueDeserializationHelper {
const RefPtr<FileBlobImpl> impl = new FileBlobImpl(file);
impl->SetFileId(aFile.mFileInfo->Id());
blob = File::Create(nullptr, impl);
}
// It can happen that this IDB is chrome code, so there is no parent, but
// still we want to set a correct parent for the new File object.
nsCOMPtr<nsISupports> parent;
if (NS_IsMainThread()) {
if (aDatabase && aDatabase->GetParentObject()) {
parent = aDatabase->GetParentObject();
} else {
parent = xpc::CurrentNativeGlobal(aCx);
blob = File::Create(global, impl);
if (NS_WARN_IF(!blob)) {
return false;
}
} else {
WorkerPrivate* workerPrivate = GetCurrentThreadWorkerPrivate();
MOZ_ASSERT(workerPrivate);
WorkerGlobalScope* globalScope = workerPrivate->GlobalScope();
MOZ_ASSERT(globalScope);
parent = do_QueryObject(globalScope);
}
MOZ_ASSERT(parent);
if (aData.tag == SCTAG_DOM_BLOB) {
blob->Impl()->SetLazyData(VoidString(), aData.type, aData.size,
INT64_MAX);
@ -676,6 +679,10 @@ class ValueDeserializationHelper {
const RefPtr<Blob> exposedBlob =
Blob::Create(blob->GetParentObject(), blob->Impl());
if (NS_WARN_IF(!exposedBlob)) {
return false;
}
MOZ_ASSERT(exposedBlob);
JS::Rooted<JS::Value> wrappedBlob(aCx);
if (!ToJSValue(aCx, exposedBlob, &wrappedBlob)) {

View File

@ -33,7 +33,7 @@ AudioCaptureTrack::AudioCaptureTrack(TrackRate aRate)
mStarted(false) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_COUNT_CTOR(AudioCaptureTrack);
mMixer.AddCallback(this);
mMixer.AddCallback(WrapNotNull(this));
}
AudioCaptureTrack::~AudioCaptureTrack() {

View File

@ -6,10 +6,11 @@
#define MOZILLA_AUDIOMIXER_H_
#include "AudioSampleFormat.h"
#include "nsTArray.h"
#include "mozilla/PodOperations.h"
#include "mozilla/LinkedList.h"
#include "AudioStream.h"
#include "nsTArray.h"
#include "mozilla/LinkedList.h"
#include "mozilla/NotNull.h"
#include "mozilla/PodOperations.h"
namespace mozilla {
@ -53,6 +54,7 @@ class AudioMixer {
for (MixerCallback* cb = mCallbacks.getFirst(); cb != nullptr;
cb = cb->getNext()) {
MixerCallbackReceiver* receiver = cb->mReceiver;
MOZ_ASSERT(receiver);
receiver->MixerCallback(mMixedAudio.Elements(),
AudioSampleTypeToFormat<AudioDataValue>::Format,
mChannels, mFrames, mSampleRate);
@ -85,7 +87,7 @@ class AudioMixer {
}
}
void AddCallback(MixerCallbackReceiver* aReceiver) {
void AddCallback(NotNull<MixerCallbackReceiver*> aReceiver) {
mCallbacks.insertBack(new MixerCallback(aReceiver));
}
@ -121,9 +123,9 @@ class AudioMixer {
class MixerCallback : public LinkedListElement<MixerCallback> {
public:
explicit MixerCallback(MixerCallbackReceiver* aReceiver)
explicit MixerCallback(NotNull<MixerCallbackReceiver*> aReceiver)
: mReceiver(aReceiver) {}
MixerCallbackReceiver* mReceiver;
NotNull<MixerCallbackReceiver*> mReceiver;
};
/* Function that is called when the mixing is done. */
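
AddCallback now takes a NotNull<MixerCallbackReceiver*>, which moves the null check to the call site instead of relying on an assertion inside the mixer. A small sketch of the calling convention (RegisterReceiver is a hypothetical helper):

#include "mozilla/NotNull.h"

void RegisterReceiver(AudioMixer& aMixer, MixerCallbackReceiver* aReceiver) {
  // WrapNotNull asserts that aReceiver is non-null and yields a
  // NotNull<MixerCallbackReceiver*>, so the mixer can use it without
  // further checks.
  aMixer.AddCallback(WrapNotNull(aReceiver));
}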

View File

@ -3,7 +3,6 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AudioSegment.h"
#include "AudioMixer.h"
#include "AudioChannelFormat.h"
#include <speex/speex_resampler.h>
@ -30,16 +29,13 @@ void AudioSegment::ApplyVolume(float aVolume) {
}
}
void AudioSegment::ResampleChunks(SpeexResamplerState* aResampler,
void AudioSegment::ResampleChunks(nsAutoRef<SpeexResamplerState>& aResampler,
uint32_t* aResamplerChannelCount,
uint32_t aInRate, uint32_t aOutRate) {
if (mChunks.IsEmpty()) {
return;
}
MOZ_ASSERT(
aResampler || IsNull(),
"We can only be here without a resampler if this segment is null.");
AudioSampleFormat format = AUDIO_FORMAT_SILENCE;
for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
if (ci->mBufferFormat != AUDIO_FORMAT_SILENCE) {
@ -53,10 +49,10 @@ void AudioSegment::ResampleChunks(SpeexResamplerState* aResampler,
// the chunks duration.
case AUDIO_FORMAT_SILENCE:
case AUDIO_FORMAT_FLOAT32:
Resample<float>(aResampler, aInRate, aOutRate);
Resample<float>(aResampler, aResamplerChannelCount, aInRate, aOutRate);
break;
case AUDIO_FORMAT_S16:
Resample<int16_t>(aResampler, aInRate, aOutRate);
Resample<int16_t>(aResampler, aResamplerChannelCount, aInRate, aOutRate);
break;
default:
MOZ_ASSERT(false);

View File

@ -5,11 +5,14 @@
#ifndef MOZILLA_AUDIOSEGMENT_H_
#define MOZILLA_AUDIOSEGMENT_H_
#include <speex/speex_resampler.h>
#include "MediaTrackGraph.h"
#include "MediaSegment.h"
#include "AudioSampleFormat.h"
#include "AudioChannelFormat.h"
#include "SharedBuffer.h"
#include "WebAudioUtils.h"
#include "nsAutoRef.h"
#ifdef MOZILLA_INTERNAL_API
# include "mozilla/TimeStamp.h"
#endif
@ -201,7 +204,7 @@ struct AudioChunk {
mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
}
size_t ChannelCount() const { return mChannelData.Length(); }
uint32_t ChannelCount() const { return mChannelData.Length(); }
bool IsMuted() const { return mVolume == 0.0f; }
@ -292,14 +295,16 @@ class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
~AudioSegment() {}
// Resample the whole segment in place.
// Resample the whole segment in place. `aResampler` is an instance of a
// resampler, initialized with `aResamplerChannelCount` channels. If this
// function finds a chunk with a different channel count, `aResampler` is
// destroyed, a new resampler is created, and `aResamplerChannelCount` is
// updated with the new channel count value.
template <typename T>
void Resample(SpeexResamplerState* aResampler, uint32_t aInRate,
void Resample(nsAutoRef<SpeexResamplerState>& aResampler,
uint32_t* aResamplerChannelCount, uint32_t aInRate,
uint32_t aOutRate) {
mDuration = 0;
#ifdef DEBUG
uint32_t segmentChannelCount = ChannelCount();
#endif
for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
AutoTArray<nsTArray<T>, GUESS_AUDIO_CHANNELS> output;
@ -312,7 +317,17 @@ class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
continue;
}
uint32_t channels = c.mChannelData.Length();
MOZ_ASSERT(channels == segmentChannelCount);
// This might introduce a discontinuity, but a channel count change in the
// middle of a stream is not that common. This also initializes the
// resampler as late as possible.
if (channels != *aResamplerChannelCount) {
SpeexResamplerState* state =
speex_resampler_init(channels, aInRate, aOutRate,
SPEEX_RESAMPLER_QUALITY_DEFAULT, nullptr);
MOZ_ASSERT(state);
aResampler.own(state);
*aResamplerChannelCount = channels;
}
output.SetLength(channels);
bufferPtrs.SetLength(channels);
uint32_t inFrames = c.mDuration;
@ -325,8 +340,8 @@ class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
uint32_t outFrames = outSize;
const T* in = static_cast<const T*>(c.mChannelData[i]);
dom::WebAudioUtils::SpeexResamplerProcess(aResampler, i, in, &inFrames,
out, &outFrames);
dom::WebAudioUtils::SpeexResamplerProcess(aResampler.get(), i, in,
&inFrames, out, &outFrames);
MOZ_ASSERT(inFrames == c.mDuration);
bufferPtrs[i] = out;
@ -342,7 +357,8 @@ class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
}
}
void ResampleChunks(SpeexResamplerState* aResampler, uint32_t aInRate,
void ResampleChunks(nsAutoRef<SpeexResamplerState>& aResampler,
uint32_t* aResamplerChannelCount, uint32_t aInRate,
uint32_t aOutRate);
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
const nsTArray<const float*>& aChannelData,
@ -401,18 +417,17 @@ class AudioSegment : public MediaSegmentBase<AudioSegment, AudioChunk> {
// aChannelCount channels.
void Mix(AudioMixer& aMixer, uint32_t aChannelCount, uint32_t aSampleRate);
int ChannelCount() {
NS_WARNING_ASSERTION(
!mChunks.IsEmpty(),
"Cannot query channel count on a AudioSegment with no chunks.");
// Returns the maximum channel count across this segment's chunks.
uint32_t MaxChannelCount() {
// A chunk that has zero channels is just silence and can simply be ignored
// when computing the maximum.
uint32_t channelCount = 0;
for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
if (ci->ChannelCount()) {
return ci->ChannelCount();
channelCount = std::max(channelCount, ci->ChannelCount());
}
}
return 0;
return channelCount;
}
static Type StaticType() { return AUDIO; }
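
Under the new contract the caller owns the resampler state and its channel count; ResampleChunks (re)creates the resampler lazily whenever a chunk's channel count differs. A caller-side sketch, assuming an AudioSegment named segment is in scope:

nsAutoRef<SpeexResamplerState> resampler;  // Empty: nothing allocated yet.
uint32_t resamplerChannelCount = 0;        // 0 == resampler not initialized.

// Resample the whole segment from 44.1 kHz to 48 kHz in place. The first
// non-silent chunk triggers creation of the resampler; a later channel
// count change destroys it and creates a new one.
segment.ResampleChunks(resampler, &resamplerChannelCount, 44100, 48000);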

View File

@ -453,10 +453,11 @@ TrackAndPromiseForOperation::TrackAndPromiseForOperation(
mFlags(aFlags) {}
AudioCallbackDriver::AudioCallbackDriver(MediaTrackGraphImpl* aGraphImpl,
uint32_t aOutputChannelCount,
uint32_t aInputChannelCount,
AudioInputType aAudioInputType)
: GraphDriver(aGraphImpl),
mOutputChannels(0),
mOutputChannels(aOutputChannelCount),
mSampleRate(0),
mInputChannelCount(aInputChannelCount),
mIterationDurationMS(MEDIA_GRAPH_TARGET_PERIOD_MS),
@ -553,8 +554,6 @@ bool AudioCallbackDriver::Init() {
output.format = CUBEB_SAMPLE_FLOAT32NE;
}
// Query and set the number of channels this AudioCallbackDriver will use.
mOutputChannels = GraphImpl()->AudioOutputChannelCount();
if (!mOutputChannels) {
LOG(LogLevel::Warning, ("Output number of channels is 0."));
Monitor2AutoLock lock(GraphImpl()->GetMonitor());
@ -579,7 +578,10 @@ bool AudioCallbackDriver::Init() {
SpillBuffer<AudioDataValue, WEBAUDIO_BLOCK_SIZE * 2>(mOutputChannels);
output.channels = mOutputChannels;
output.layout = CUBEB_LAYOUT_UNDEFINED;
AudioConfig::ChannelLayout::ChannelMap channelMap =
AudioConfig::ChannelLayout(mOutputChannels).Map();
output.layout = static_cast<uint32_t>(channelMap);
output.prefs = CubebUtils::GetDefaultStreamPrefs();
#if !defined(XP_WIN)
if (mInputDevicePreference == CUBEB_DEVICE_PREF_VOICE) {
@ -725,7 +727,7 @@ void AudioCallbackDriver::AddMixerCallback() {
MOZ_ASSERT(OnGraphThread());
if (!mAddedMixer) {
mGraphImpl->mMixer.AddCallback(this);
mGraphImpl->mMixer.AddCallback(WrapNotNull(this));
mAddedMixer = true;
}
}
@ -799,7 +801,7 @@ long AudioCallbackDriver::DataCallback(const AudioDataValue* aInputBuffer,
// Don't add the callback until we're inited and ready
if (!mAddedMixer) {
GraphImpl()->mMixer.AddCallback(this);
GraphImpl()->mMixer.AddCallback(WrapNotNull(this));
mAddedMixer = true;
}
@ -896,6 +898,18 @@ long AudioCallbackDriver::DataCallback(const AudioDataValue* aInputBuffer,
GraphImpl()->NotifyOutputData(aOutputBuffer, static_cast<size_t>(aFrames),
mSampleRate, mOutputChannels);
#ifdef XP_MACOSX
// This only happens when the output is on a MacBook Pro's built-in
// speakers, which are stereo, but let's just be safe.
if (mNeedsPanning && mOutputChannels == 2) {
// hard pan to the right
for (uint32_t i = 0; i < aFrames * 2; i += 2) {
aOutputBuffer[i + 1] += aOutputBuffer[i];
aOutputBuffer[i] = 0.0;
}
}
#endif
if (!stillProcessing) {
// About to hand over control of the graph. Do not start a new driver if
// StateCallback() receives an error for this stream while the main thread
@ -996,7 +1010,7 @@ void AudioCallbackDriver::MixerCallback(AudioDataValue* aMixedBuffer,
void AudioCallbackDriver::PanOutputIfNeeded(bool aMicrophoneActive) {
#ifdef XP_MACOSX
cubeb_device* out;
cubeb_device* out = nullptr;
int rv;
char name[128];
size_t length = sizeof(name);
@ -1008,22 +1022,16 @@ void AudioCallbackDriver::PanOutputIfNeeded(bool aMicrophoneActive) {
if (!strncmp(name, "MacBookPro", 10)) {
if (cubeb_stream_get_current_device(mAudioStream, &out) == CUBEB_OK) {
MOZ_ASSERT(out);
// Check if we are currently outputting sound on external speakers.
if (!strcmp(out->output_name, "ispk")) {
if (out->output_name && !strcmp(out->output_name, "ispk")) {
// Pan everything to the right speaker.
if (aMicrophoneActive) {
if (cubeb_stream_set_panning(mAudioStream, 1.0) != CUBEB_OK) {
NS_WARNING("Could not pan audio output to the right.");
}
} else {
if (cubeb_stream_set_panning(mAudioStream, 0.0) != CUBEB_OK) {
NS_WARNING("Could not pan audio output to the center.");
}
}
LOG(LogLevel::Debug, ("Using the built-in speakers, with%s audio input",
aMicrophoneActive ? "" : "out"));
mNeedsPanning = aMicrophoneActive;
} else {
if (cubeb_stream_set_panning(mAudioStream, 0.0) != CUBEB_OK) {
NS_WARNING("Could not pan audio output to the center.");
}
LOG(LogLevel::Debug, ("Using an external output device"));
mNeedsPanning = false;
}
cubeb_stream_device_destroy(mAudioStream, out);
}
@ -1038,7 +1046,12 @@ void AudioCallbackDriver::DeviceChangedCallback() {
Monitor2AutoLock mon(mGraphImpl->GetMonitor());
GraphImpl()->DeviceChanged();
#ifdef XP_MACOSX
PanOutputIfNeeded(mInputChannelCount);
RefPtr<AudioCallbackDriver> self(this);
bool hasInput = mInputChannelCount;
NS_DispatchBackgroundTask(NS_NewRunnableFunction(
"PanOutputIfNeeded", [self{std::move(self)}, hasInput]() {
self->PanOutputIfNeeded(hasInput);
}));
#endif
}

View File

@ -129,7 +129,7 @@ class GraphDriver {
* before being started again. */
virtual void Start() = 0;
/* Shutdown GraphDriver (synchronously) */
virtual void Shutdown() = 0;
MOZ_CAN_RUN_SCRIPT virtual void Shutdown() = 0;
/* Rate at which the GraphDriver runs, in ms. This can either be user
* controlled (because we are using a {System,Offline}ClockDriver, and decide
* how often we want to wakeup/how much we want to process per iteration), or
@ -231,7 +231,7 @@ class ThreadedDriver : public GraphDriver {
void WaitForNextIteration() override;
void WakeUp() override;
void Start() override;
void Shutdown() override;
MOZ_CAN_RUN_SCRIPT void Shutdown() override;
/**
* Runs main control loop on the graph thread. Normally a single invocation
* of this runs for the entire lifetime of the graph thread.
@ -352,14 +352,14 @@ class AudioCallbackDriver : public GraphDriver,
public:
/** If aInputChannelCount is zero, then this driver is output-only. */
AudioCallbackDriver(MediaTrackGraphImpl* aGraphImpl,
uint32_t aInputChannelCount,
uint32_t aOutputChannelCount, uint32_t aInputChannelCount,
AudioInputType aAudioInputType);
virtual ~AudioCallbackDriver();
void Start() override;
void WaitForNextIteration() override;
void WakeUp() override;
void Shutdown() override;
MOZ_CAN_RUN_SCRIPT void Shutdown() override;
#if defined(XP_WIN)
void ResetDefaultDevice() override;
#endif
@ -459,7 +459,7 @@ class AudioCallbackDriver : public GraphDriver,
}
/* MediaTrackGraphs are always down/up mixed to output channels. */
uint32_t mOutputChannels;
const uint32_t mOutputChannels;
/* The size of this buffer comes from the fact that some audio backends can
* call back with a number of frames lower than one block (128 frames), so we
* need to keep at most two block in the SpillBuffer, because we always round
@ -533,6 +533,12 @@ class AudioCallbackDriver : public GraphDriver,
/* True if this driver was created from a driver created because of a previous
* AudioCallbackDriver failure. */
bool mFromFallback;
#ifdef XP_MACOSX
/* When using the built-in speakers on a MacBook Pro (13- and 15-inch, all
* models), it's best to hard pan the audio to the right, to avoid feedback
* into the microphone that is located next to the left speaker. */
Atomic<bool> mNeedsPanning;
#endif
};
class AsyncCubebTask : public Runnable {

View File

@ -14,30 +14,37 @@
namespace mozilla {
static void Start(void* aArg) {
GraphRunner* th = static_cast<GraphRunner*>(aArg);
th->Run();
}
GraphRunner::GraphRunner(MediaTrackGraphImpl* aGraph)
: mMonitor("GraphRunner::mMonitor"),
GraphRunner::GraphRunner(MediaTrackGraphImpl* aGraph,
already_AddRefed<nsIThread> aThread)
: Runnable("GraphRunner"),
mMonitor("GraphRunner::mMonitor"),
mGraph(aGraph),
mStateEnd(0),
mStillProcessing(true),
mThreadState(ThreadState::Wait),
// Note that mThread needs to be initialized last, as it may pre-empt the
// thread running this ctor and enter Run() with uninitialized members.
mThread(PR_CreateThread(PR_SYSTEM_THREAD, &Start, this,
PR_PRIORITY_URGENT, PR_GLOBAL_THREAD,
PR_JOINABLE_THREAD, 0)) {
MOZ_COUNT_CTOR(GraphRunner);
mThread(aThread) {
mThread->Dispatch(do_AddRef(this));
}
GraphRunner::~GraphRunner() {
MOZ_COUNT_DTOR(GraphRunner);
MOZ_ASSERT(mThreadState == ThreadState::Shutdown);
}
/* static */
already_AddRefed<GraphRunner> GraphRunner::Create(MediaTrackGraphImpl* aGraph) {
nsCOMPtr<nsIThread> thread;
if (NS_WARN_IF(NS_FAILED(
NS_NewNamedThread("GraphRunner", getter_AddRefs(thread))))) {
return nullptr;
}
nsCOMPtr<nsISupportsPriority> supportsPriority = do_QueryInterface(thread);
MOZ_ASSERT(supportsPriority);
MOZ_ALWAYS_SUCCEEDS(
supportsPriority->SetPriority(nsISupportsPriority::PRIORITY_HIGHEST));
return do_AddRef(new GraphRunner(aGraph, thread.forget()));
}
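
Create() can fail if the named thread cannot be spawned, so callers should be prepared for a null return. A hedged caller-side sketch (aGraph is an assumed MediaTrackGraphImpl*):

RefPtr<GraphRunner> runner = GraphRunner::Create(aGraph);
if (NS_WARN_IF(!runner)) {
  // Thread creation failed; the graph cannot get a dedicated runner thread.
  return nullptr;
}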
void GraphRunner::Shutdown() {
{
Monitor2AutoLock lock(mMonitor);
@ -45,12 +52,7 @@ void GraphRunner::Shutdown() {
mThreadState = ThreadState::Shutdown;
mMonitor.Signal();
}
// We need to wait for runner thread shutdown here for the sake of the
// xpcomWillShutdown case, so that the main thread is not shut down before
// cleanup messages are sent for objects destroyed in
// CycleCollectedJSContext shutdown.
PR_JoinThread(mThread);
mThread = nullptr;
mThread->Shutdown();
}
bool GraphRunner::OneIteration(GraphTime aStateEnd) {
@ -86,8 +88,7 @@ bool GraphRunner::OneIteration(GraphTime aStateEnd) {
return mStillProcessing;
}
void GraphRunner::Run() {
PR_SetCurrentThreadName("GraphRunner");
NS_IMETHODIMP GraphRunner::Run() {
Monitor2AutoLock lock(mMonitor);
while (true) {
while (mThreadState == ThreadState::Wait) {
@ -104,9 +105,13 @@ void GraphRunner::Run() {
}
dom::WorkletThread::DeleteCycleCollectedJSContext();
return NS_OK;
}
bool GraphRunner::OnThread() { return PR_GetCurrentThread() == mThread; }
bool GraphRunner::OnThread() {
return mThread->EventTarget()->IsOnCurrentThread();
}
#ifdef DEBUG
bool GraphRunner::RunByGraphDriver(GraphDriver* aDriver) {

View File

@ -17,15 +17,14 @@ namespace mozilla {
class GraphDriver;
class MediaTrackGraphImpl;
class GraphRunner {
class GraphRunner final : public Runnable {
public:
explicit GraphRunner(MediaTrackGraphImpl* aGraph);
~GraphRunner();
static already_AddRefed<GraphRunner> Create(MediaTrackGraphImpl* aGraph);
/**
* Marks us as shut down and signals mThread, so that it runs until the end.
*/
void Shutdown();
MOZ_CAN_RUN_SCRIPT void Shutdown();
/**
* Signals one iteration of mGraph. Hands aStateEnd over to mThread and runs
@ -36,7 +35,7 @@ class GraphRunner {
/**
* Runs mGraph until it shuts down.
*/
void Run();
NS_IMETHOD Run();
/**
* Returns true if called on mThread.
@ -52,6 +51,10 @@ class GraphRunner {
#endif
private:
explicit GraphRunner(MediaTrackGraphImpl* aGraph,
already_AddRefed<nsIThread> aThread);
~GraphRunner();
// Monitor used for yielding mThread through Wait(), and scheduling mThread
// through Signal() from a GraphDriver.
Monitor2 mMonitor;
@ -78,7 +81,7 @@ class GraphRunner {
// The thread running mGraph. Set on construction, after other members are
// initialized. Cleared at the end of Shutdown().
PRThread* mThread;
const nsCOMPtr<nsIThread> mThread;
#ifdef DEBUG
// Set to mGraph's audio callback driver's thread id, if run by an

View File

@ -222,25 +222,6 @@ VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
VideoData::~VideoData() {}
void VideoData::SetListener(UniquePtr<Listener> aListener) {
MOZ_ASSERT(!mSentToCompositor,
"Listener should be registered before sending data");
mListener = std::move(aListener);
}
void VideoData::MarkSentToCompositor() {
if (mSentToCompositor) {
return;
}
mSentToCompositor = true;
if (mListener != nullptr) {
mListener->OnSentToCompositor();
mListener = nullptr;
}
}
size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
size_t size = aMallocSizeOf(this);

View File

@ -449,12 +449,6 @@ class VideoData : public MediaData {
ColorRange mColorRange = ColorRange::LIMITED;
};
class Listener {
public:
virtual void OnSentToCompositor() = 0;
virtual ~Listener() {}
};
// Constructs a VideoData object. If aImage is nullptr, creates a new Image
// holding a copy of the YCbCr data passed in aBuffer. If aImage is not
// nullptr, it's stored as the underlying video image and aBuffer is assumed
@ -509,8 +503,7 @@ class VideoData : public MediaData {
const media::TimeUnit& aTimecode, IntSize aDisplay,
uint32_t aFrameID);
void SetListener(UniquePtr<Listener> aListener);
void MarkSentToCompositor();
void MarkSentToCompositor() { mSentToCompositor = true; }
bool IsSentToCompositor() { return mSentToCompositor; }
void UpdateDuration(const media::TimeUnit& aDuration);
@ -526,7 +519,6 @@ class VideoData : public MediaData {
~VideoData();
bool mSentToCompositor;
UniquePtr<Listener> mListener;
media::TimeUnit mNextKeyFrameTime;
};

View File

@ -3000,3 +3000,5 @@ void MediaFormatReader::OnFirstDemuxFailed(TrackInfo::TrackType aType,
} // namespace mozilla
#undef NS_DispatchToMainThread
#undef LOGV
#undef LOG

View File

@ -159,10 +159,6 @@ class nsMainThreadPtrHolder<
namespace mozilla {
#ifdef LOG
# undef LOG
#endif
LazyLogModule gMediaManagerLog("MediaManager");
#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
@ -4703,4 +4699,6 @@ void GetUserMediaWindowListener::NotifyChrome() {
}));
}
#undef LOG
} // namespace mozilla

File diff suppressed because it is too large

View File

@ -23,7 +23,7 @@ class GlobalObject;
namespace dom {
class AudioNode;
class Blob;
class BlobImpl;
class Document;
class DOMException;
@ -47,10 +47,7 @@ class MediaRecorder final : public DOMEventTargetHelper,
public:
class Session;
MediaRecorder(DOMMediaStream& aSourceMediaTrack,
nsPIDOMWindowInner* aOwnerWindow);
MediaRecorder(AudioNode& aSrcAudioNode, uint32_t aSrcOutput,
nsPIDOMWindowInner* aOwnerWindow);
explicit MediaRecorder(nsPIDOMWindowInner* aOwnerWindow);
static nsTArray<RefPtr<Session>> GetSessions();
@ -58,8 +55,6 @@ class MediaRecorder final : public DOMEventTargetHelper,
JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
nsPIDOMWindowInner* GetParentObject() { return GetOwner(); }
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaRecorder, DOMEventTargetHelper)
@ -79,23 +74,24 @@ class MediaRecorder final : public DOMEventTargetHelper,
// Extract encoded data Blob from MutableBlobStorage.
void RequestData(ErrorResult& aResult);
// Return the DOMMediaStream passed in from the UA.
DOMMediaStream* Stream() const { return mDOMStream; }
DOMMediaStream* Stream() const { return mStream; }
// Return the current encoding MIME type selected by the MediaEncoder.
void GetMimeType(nsString& aMimeType);
// The current state of the MediaRecorder object.
RecordingState State() const { return mState; }
static bool IsTypeSupported(GlobalObject& aGlobal, const nsAString& aType);
static bool IsTypeSupported(const nsAString& aType);
static bool IsTypeSupported(GlobalObject& aGlobal,
const nsAString& aMIMEType);
static bool IsTypeSupported(const nsAString& aMIMEType);
// Construct a recorder with a DOM media stream object as its source.
static already_AddRefed<MediaRecorder> Constructor(
const GlobalObject& aGlobal, DOMMediaStream& aStream,
const MediaRecorderOptions& aInitDict, ErrorResult& aRv);
const MediaRecorderOptions& aOptions, ErrorResult& aRv);
// Construct a recorder with a Web Audio destination node as its source.
static already_AddRefed<MediaRecorder> Constructor(
const GlobalObject& aGlobal, AudioNode& aSrcAudioNode,
uint32_t aSrcOutput, const MediaRecorderOptions& aInitDict,
const GlobalObject& aGlobal, AudioNode& aAudioNode,
uint32_t aAudioNodeOutput, const MediaRecorderOptions& aOptions,
ErrorResult& aRv);
/*
@ -112,27 +108,22 @@ class MediaRecorder final : public DOMEventTargetHelper,
IMPL_EVENT_HANDLER(pause)
IMPL_EVENT_HANDLER(resume)
IMPL_EVENT_HANDLER(error)
IMPL_EVENT_HANDLER(warning)
NS_DECL_NSIDOCUMENTACTIVITY
uint32_t GetAudioBitrate() { return mAudioBitsPerSecond; }
uint32_t GetVideoBitrate() { return mVideoBitsPerSecond; }
uint32_t GetBitrate() { return mBitsPerSecond; }
uint32_t AudioBitsPerSecond() const { return mAudioBitsPerSecond; }
uint32_t VideoBitsPerSecond() const { return mVideoBitsPerSecond; }
protected:
virtual ~MediaRecorder();
MediaRecorder& operator=(const MediaRecorder& x) = delete;
// Create a dataavailable event with the Blob data; this runs on the main thread.
nsresult CreateAndDispatchBlobEvent(Blob* aBlob);
nsresult CreateAndDispatchBlobEvent(BlobImpl* aBlobImpl);
// Create a simple event to notify the UA.
void DispatchSimpleEvent(const nsAString& aStr);
// Create an error event carrying a message.
void NotifyError(nsresult aRv);
// Set encoded MIME type.
void SetMimeType(const nsString& aMimeType);
void SetOptions(const MediaRecorderOptions& aInitDict);
MediaRecorder(const MediaRecorder& x) = delete; // prevent bad usage
// Remove session pointer.
@ -143,38 +134,33 @@ class MediaRecorder final : public DOMEventTargetHelper,
// available at the time the error event is fired. Note, depending on when
// this is called there may not be a JS stack to capture.
void InitializeDomExceptions();
// Set the recorder state to inactive. This is needed to handle error states
// in the recorder where state must transition to inactive before full
// stoppage can be reached.
void ForceInactive();
// Runs the "Inactivate the recorder" algorithm.
void Inactivate();
// Stop the recorder and its internal session. This should be used by
// sessions that are in the process of being destroyed.
void StopForSessionDestruction();
// DOM wrapper for source media stream. Will be null when input is audio node.
RefPtr<DOMMediaStream> mDOMStream;
RefPtr<DOMMediaStream> mStream;
// Source audio node. Will be null when input is a media stream.
RefPtr<AudioNode> mAudioNode;
// Source audio node's output index. Will be zero when input is a media
// stream.
const uint32_t mAudioNodeOutput;
uint32_t mAudioNodeOutput = 0;
// The current state of the MediaRecorder object.
RecordingState mState;
RecordingState mState = RecordingState::Inactive;
// Hold the session references and clear them when the DestroyRunnable for
// a session runs.
nsTArray<RefPtr<Session>> mSessions;
RefPtr<Document> mDocument;
// It specifies the container format as well as the audio and video capture
// formats.
nsString mMimeType;
nsString mConstrainedMimeType;
uint32_t mAudioBitsPerSecond;
uint32_t mVideoBitsPerSecond;
uint32_t mBitsPerSecond;
TimeStamp mStartTime;
uint32_t mAudioBitsPerSecond = 0;
uint32_t mVideoBitsPerSecond = 0;
Maybe<uint32_t> mConstrainedBitsPerSecond;
// DOMExceptions that are created early and possibly thrown in NotifyError.
// Creating them early allows us to capture the JS stack for which cannot be

View File

@ -99,6 +99,8 @@ void MediaTrackGraphImpl::RemoveTrackGraphThread(MediaTrack* aTrack) {
// Ensure that mFirstCycleBreaker and mMixer are updated when necessary.
SetTrackOrderDirty();
UnregisterAllAudioOutputs(aTrack);
if (aTrack->IsSuspended()) {
mSuspendedTracks.RemoveElement(aTrack);
} else {
@ -282,6 +284,7 @@ bool MediaTrackGraphImpl::AudioTrackPresent() {
void MediaTrackGraphImpl::UpdateTrackOrder() {
MOZ_ASSERT(OnGraphThread());
bool audioTrackPresent = AudioTrackPresent();
uint32_t graphOutputChannelCount = AudioOutputChannelCount();
// Note that this looks for any audio tracks, input or output, and switches
// to a SystemClockDriver if there are none. However, if another is already
@ -306,11 +309,30 @@ void MediaTrackGraphImpl::UpdateTrackOrder() {
}
if (audioTrackPresent && mRealtime &&
!CurrentDriver()->AsAudioCallbackDriver() && !switching) {
!CurrentDriver()->AsAudioCallbackDriver() && !switching &&
graphOutputChannelCount > 0) {
Monitor2AutoLock mon(mMonitor);
if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
AudioCallbackDriver* driver = new AudioCallbackDriver(
this, AudioInputChannelCount(), AudioInputDevicePreference());
this, graphOutputChannelCount, AudioInputChannelCount(),
AudioInputDevicePreference());
CurrentDriver()->SwitchAtNextIteration(driver);
}
}
// Check if this graph should switch to a different number of output channels.
// Generally, a driver switch is explicitly made by an event (e.g., setting
// the AudioDestinationNode channelCount), but if an HTMLMediaElement is
// directly playing back via another HTMLMediaElement, the number of channels
// of the media determines how many channels to output, and it can change
// dynamically.
if (CurrentDriver()->AsAudioCallbackDriver() && !switching) {
if (graphOutputChannelCount !=
CurrentDriver()->AsAudioCallbackDriver()->OutputChannelCount()) {
AudioCallbackDriver* driver = new AudioCallbackDriver(
this, graphOutputChannelCount, AudioInputChannelCount(),
AudioInputDevicePreference());
Monitor2AutoLock mon(mMonitor);
CurrentDriver()->SwitchAtNextIteration(driver);
}
}
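// A minimal standalone sketch of the driver-switch decision above, using
// simplified stand-in types rather than the Gecko classes (FakeDriver and
// ShouldSwitchDriver are illustrative names, not real API). It captures the
// two triggers: audio is present but no audio driver runs yet, and the
// graph's output channel count no longer matches the current driver's.
#include <cstdint>

struct FakeDriver {
  uint32_t mOutputChannels;
  bool mIsAudioCallback;
};

bool ShouldSwitchDriver(const FakeDriver& aCurrent, bool aAudioTrackPresent,
                        bool aAlreadySwitching, uint32_t aGraphChannels) {
  if (aAlreadySwitching) {
    return false;  // A switch is already queued for the next iteration.
  }
  if (aAudioTrackPresent && !aCurrent.mIsAudioCallback && aGraphChannels > 0) {
    return true;  // Audio needs an AudioCallbackDriver, not a system clock.
  }
  if (aCurrent.mIsAudioCallback &&
      aCurrent.mOutputChannels != aGraphChannels) {
    return true;  // Channel count changed dynamically, e.g. new media.
  }
  return false;
}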
@ -501,134 +523,97 @@ void MediaTrackGraphImpl::UpdateTrackOrder() {
MOZ_ASSERT(orderedTrackCount == mFirstCycleBreaker);
}
void MediaTrackGraphImpl::CreateOrDestroyAudioTracks(MediaTrack* aTrack) {
MOZ_ASSERT(OnGraphThread());
MOZ_ASSERT(mRealtime,
"Should only attempt to create audio tracks in real-time mode");
if (aTrack->mAudioOutputs.IsEmpty()) {
aTrack->mAudioOutputStream = nullptr;
return;
}
if (aTrack->mAudioOutputStream) {
return;
}
LOG(LogLevel::Debug,
("%p: Updating AudioOutputStream for MediaTrack %p", this, aTrack));
aTrack->mAudioOutputStream = MakeUnique<MediaTrack::AudioOutputStream>();
aTrack->mAudioOutputStream->mAudioPlaybackStartTime = mProcessedTime;
aTrack->mAudioOutputStream->mBlockedAudioTime = 0;
aTrack->mAudioOutputStream->mLastTickWritten = 0;
bool switching = false;
{
Monitor2AutoLock lock(mMonitor);
switching = CurrentDriver()->Switching();
}
if (!CurrentDriver()->AsAudioCallbackDriver() && !switching) {
Monitor2AutoLock mon(mMonitor);
if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
AudioCallbackDriver* driver = new AudioCallbackDriver(
this, AudioInputChannelCount(), AudioInputDevicePreference());
CurrentDriver()->SwitchAtNextIteration(driver);
}
}
}
TrackTime MediaTrackGraphImpl::PlayAudio(MediaTrack* aTrack) {
TrackTime MediaTrackGraphImpl::PlayAudio(const TrackKeyAndVolume& aTkv,
GraphTime aPlayedTime) {
MOZ_ASSERT(OnGraphThread());
MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");
float volume = 0.0f;
for (uint32_t i = 0; i < aTrack->mAudioOutputs.Length(); ++i) {
volume += aTrack->mAudioOutputs[i].mVolume * mGlobalVolume;
}
TrackTime ticksWritten = 0;
if (aTrack->mAudioOutputStream) {
ticksWritten = 0;
ticksWritten = 0;
MediaTrack* track = aTkv.mTrack;
AudioSegment* audio = track->GetData<AudioSegment>();
AudioSegment output;
MediaTrack::AudioOutputStream& audioOutput = *aTrack->mAudioOutputStream;
AudioSegment* audio = aTrack->GetData<AudioSegment>();
AudioSegment output;
TrackTime offset = track->GraphTimeToTrackTime(aPlayedTime);
TrackTime offset = aTrack->GraphTimeToTrackTime(mProcessedTime);
// We don't update Track->mTracksStartTime here to account for time spent
// blocked. Instead, we'll update it in UpdateCurrentTimeForTracks after
// the blocked period has completed. But we do need to make sure we play
// from the right offsets in the track buffer, even if we've already
// written silence for some amount of blocked time after the current time.
GraphTime t = aPlayedTime;
while (t < mStateComputedTime) {
bool blocked = t >= track->mStartBlocking;
GraphTime end = blocked ? mStateComputedTime : track->mStartBlocking;
NS_ASSERTION(end <= mStateComputedTime, "mStartBlocking is wrong!");
// We don't update aTrack->mTracksStartTime here to account for time spent
// blocked. Instead, we'll update it in UpdateCurrentTimeForTracks after
// the blocked period has completed. But we do need to make sure we play
// from the right offsets in the track buffer, even if we've already
// written silence for some amount of blocked time after the current time.
GraphTime t = mProcessedTime;
while (t < mStateComputedTime) {
bool blocked = t >= aTrack->mStartBlocking;
GraphTime end = blocked ? mStateComputedTime : aTrack->mStartBlocking;
NS_ASSERTION(end <= mStateComputedTime, "mStartBlocking is wrong!");
// Check how many ticks of sound we can provide if we are blocked some
// time in the middle of this cycle.
TrackTime toWrite = end - t;
// Check how many ticks of sound we can provide if we are blocked some
// time in the middle of this cycle.
TrackTime toWrite = end - t;
if (blocked) {
output.InsertNullDataAtStart(toWrite);
ticksWritten += toWrite;
LOG(LogLevel::Verbose,
("%p: MediaTrack %p writing %" PRId64 " blocking-silence samples for "
"%f to %f (%" PRId64 " to %" PRId64 ")",
this, track, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
offset, offset + toWrite));
} else {
TrackTime endTicksNeeded = offset + toWrite;
TrackTime endTicksAvailable = audio->GetDuration();
if (blocked) {
output.InsertNullDataAtStart(toWrite);
ticksWritten += toWrite;
if (endTicksNeeded <= endTicksAvailable) {
LOG(LogLevel::Verbose,
("%p: MediaTrack %p writing %" PRId64
" blocking-silence samples for "
"%f to %f (%" PRId64 " to %" PRId64 ")",
this, aTrack, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, offset + toWrite));
("%p: MediaTrack %p writing %" PRId64 " samples for %f to %f "
"(samples %" PRId64 " to %" PRId64 ")",
this, track, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, endTicksNeeded));
output.AppendSlice(*audio, offset, endTicksNeeded);
ticksWritten += toWrite;
offset = endTicksNeeded;
} else {
TrackTime endTicksNeeded = offset + toWrite;
TrackTime endTicksAvailable = audio->GetDuration();
// MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not
// ended."); If we are at the end of the track, maybe write the
// remaining samples, and pad with/output silence.
if (endTicksNeeded > endTicksAvailable && offset < endTicksAvailable) {
output.AppendSlice(*audio, offset, endTicksAvailable);
if (endTicksNeeded <= endTicksAvailable) {
LOG(LogLevel::Verbose,
("%p: MediaTrack %p writing %" PRId64 " samples for %f to %f "
"(samples %" PRId64 " to %" PRId64 ")",
this, aTrack, toWrite, MediaTimeToSeconds(t),
this, track, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, endTicksNeeded));
output.AppendSlice(*audio, offset, endTicksNeeded);
ticksWritten += toWrite;
offset = endTicksNeeded;
} else {
// MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not
// ended."); If we are at the end of the track, maybe write the
// remaining samples, and pad with/output silence.
if (endTicksNeeded > endTicksAvailable &&
offset < endTicksAvailable) {
output.AppendSlice(*audio, offset, endTicksAvailable);
LOG(LogLevel::Verbose,
("%p: MediaTrack %p writing %" PRId64 " samples for %f to %f "
"(samples %" PRId64 " to %" PRId64 ")",
this, aTrack, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, endTicksNeeded));
uint32_t available = endTicksAvailable - offset;
ticksWritten += available;
toWrite -= available;
offset = endTicksAvailable;
}
output.AppendNullData(toWrite);
LOG(LogLevel::Verbose,
("%p MediaTrack %p writing %" PRId64
" padding slsamples for %f to "
"%f (samples %" PRId64 " to %" PRId64 ")",
this, aTrack, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, endTicksNeeded));
ticksWritten += toWrite;
uint32_t available = endTicksAvailable - offset;
ticksWritten += available;
toWrite -= available;
offset = endTicksAvailable;
}
output.ApplyVolume(volume);
output.AppendNullData(toWrite);
LOG(LogLevel::Verbose,
("%p MediaTrack %p writing %" PRId64 " padding slsamples for %f to "
"%f (samples %" PRId64 " to %" PRId64 ")",
this, track, toWrite, MediaTimeToSeconds(t),
MediaTimeToSeconds(end), offset, endTicksNeeded));
ticksWritten += toWrite;
}
t = end;
output.ApplyVolume(mGlobalVolume * aTkv.mVolume);
}
audioOutput.mLastTickWritten = offset;
t = end;
output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
uint32_t outputChannels;
// Use the number of channels the driver expects: this is the number of
// channels that can be output by the underlying system-level audio stream.
// Fall back to something sensible if this graph is being driven by a normal
// thread (this can happen when there are no output devices, etc.).
if (CurrentDriver()->AsAudioCallbackDriver()) {
outputChannels =
CurrentDriver()->AsAudioCallbackDriver()->OutputChannelCount();
} else {
outputChannels = AudioOutputChannelCount();
}
output.WriteTo(mMixer, outputChannels, mSampleRate);
}
return ticksWritten;
}
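// A self-contained sketch of the write loop above, with plain vectors
// standing in for AudioSegment and illustrative names throughout. Per
// interval it handles the three cases: blocked (write silence), data
// available (copy samples), and end of track (pad with silence); the
// returned tick count is what PlayAudio reports back to Process().
#include <algorithm>
#include <cstdint>
#include <vector>

int64_t MixTrackSketch(const std::vector<float>& aTrackData, int64_t aOffset,
                       int64_t aPlayedTime, int64_t aStateComputedTime,
                       int64_t aStartBlocking, float aVolume,
                       std::vector<float>& aOutput) {
  int64_t ticksWritten = 0;
  int64_t t = aPlayedTime;
  while (t < aStateComputedTime) {
    bool blocked = t >= aStartBlocking;
    int64_t end = blocked ? aStateComputedTime
                          : std::min(aStateComputedTime, aStartBlocking);
    int64_t toWrite = end - t;
    if (blocked) {
      aOutput.insert(aOutput.end(), toWrite, 0.0f);  // blocking silence
    } else {
      int64_t available = std::max<int64_t>(
          0, std::min<int64_t>(toWrite, int64_t(aTrackData.size()) - aOffset));
      for (int64_t i = 0; i < available; ++i) {
        aOutput.push_back(aTrackData[aOffset + i] * aVolume);  // real samples
      }
      aOutput.insert(aOutput.end(), toWrite - available, 0.0f);  // EOS padding
      aOffset += available;
    }
    ticksWritten += toWrite;
    t = end;
  }
  return ticksWritten;
}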
@ -656,7 +641,8 @@ void MediaTrackGraphImpl::OpenAudioInputImpl(CubebUtils::AudioDeviceID aID,
Monitor2AutoLock mon(mMonitor);
if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
AudioCallbackDriver* driver = new AudioCallbackDriver(
this, AudioInputChannelCount(), AudioInputDevicePreference());
this, AudioOutputChannelCount(), AudioInputChannelCount(),
AudioInputDevicePreference());
LOG(LogLevel::Debug,
("%p OpenAudioInput: starting new AudioCallbackDriver(input) %p",
this, driver));
@ -732,7 +718,8 @@ void MediaTrackGraphImpl::CloseAudioInputImpl(
LOG(LogLevel::Debug,
("%p: CloseInput: output present (AudioCallback)", this));
driver = new AudioCallbackDriver(this, AudioInputChannelCount(),
driver = new AudioCallbackDriver(this, AudioOutputChannelCount(),
AudioInputChannelCount(),
AudioInputDevicePreference());
CurrentDriver()->SwitchAtNextIteration(driver);
} else if (CurrentDriver()->AsAudioCallbackDriver()) {
@ -745,6 +732,51 @@ void MediaTrackGraphImpl::CloseAudioInputImpl(
}
}
void MediaTrackGraphImpl::RegisterAudioOutput(MediaTrack* aTrack, void* aKey) {
MOZ_ASSERT(OnGraphThreadOrNotRunning());
TrackKeyAndVolume* tkv = mAudioOutputs.AppendElement();
tkv->mTrack = aTrack;
tkv->mKey = aKey;
tkv->mVolume = 1.0;
bool switching = false;
{
Monitor2AutoLock lock(mMonitor);
switching = CurrentDriver()->Switching();
}
if (!CurrentDriver()->AsAudioCallbackDriver() && !switching) {
Monitor2AutoLock mon(mMonitor);
if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
AudioCallbackDriver* driver = new AudioCallbackDriver(
this, AudioOutputChannelCount(), AudioInputChannelCount(),
AudioInputDevicePreference());
CurrentDriver()->SwitchAtNextIteration(driver);
}
}
}
void MediaTrackGraphImpl::UnregisterAllAudioOutputs(MediaTrack* aTrack) {
MOZ_ASSERT(OnGraphThreadOrNotRunning());
for (int32_t i = mAudioOutputs.Length() - 1; i >= 0; i--) {
if (mAudioOutputs[i].mTrack == aTrack) {
mAudioOutputs.RemoveElementAt(i);
}
}
}
void MediaTrackGraphImpl::UnregisterAudioOutput(MediaTrack* aTrack,
void* aKey) {
MOZ_ASSERT(OnGraphThreadOrNotRunning());
mAudioOutputs.RemoveElementsBy(
[&aKey, &aTrack](const TrackKeyAndVolume& aTkv) {
return aTkv.mKey == aKey && aTkv.mTrack == aTrack;
});
}
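// The three functions above maintain one flat (track, key, volume) list for
// the whole graph. A reduced sketch with placeholder types (the real code
// keeps raw pointers owned elsewhere and runs on the graph thread):
#include <vector>

struct AudioOutputRegistrySketch {
  struct Entry {
    void* mTrack;
    void* mKey;
    float mVolume;
  };
  std::vector<Entry> mEntries;

  void Register(void* aTrack, void* aKey) {
    mEntries.push_back({aTrack, aKey, 1.0f});  // default volume is 1.0
  }
  void Unregister(void* aTrack, void* aKey) {
    for (auto it = mEntries.begin(); it != mEntries.end();) {
      it = (it->mTrack == aTrack && it->mKey == aKey) ? mEntries.erase(it)
                                                      : it + 1;
    }
  }
  void UnregisterAll(void* aTrack) {
    for (auto it = mEntries.begin(); it != mEntries.end();) {
      it = (it->mTrack == aTrack) ? mEntries.erase(it) : it + 1;
    }
  }
  bool SetVolume(void* aTrack, void* aKey, float aVolume) {
    for (Entry& e : mEntries) {
      if (e.mTrack == aTrack && e.mKey == aKey) {
        e.mVolume = aVolume;
        return true;
      }
    }
    return false;  // the real SetAudioOutputVolume crashes on a missing key
  }
};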
void MediaTrackGraphImpl::CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID,
AudioDataListener* aListener) {
MOZ_ASSERT(NS_IsMainThread());
@ -907,7 +939,8 @@ void MediaTrackGraphImpl::ReevaluateInputDevice() {
}
if (needToSwitch) {
AudioCallbackDriver* newDriver = new AudioCallbackDriver(
this, AudioInputChannelCount(), AudioInputDevicePreference());
this, AudioOutputChannelCount(), AudioInputChannelCount(),
AudioInputDevicePreference());
{
Monitor2AutoLock lock(mMonitor);
CurrentDriver()->SwitchAtNextIteration(newDriver);
@ -1032,25 +1065,24 @@ void MediaTrackGraphImpl::ProduceDataForTracksBlockByBlock(
MOZ_ASSERT(OnGraphThread());
MOZ_ASSERT(aTrackIndex <= mFirstCycleBreaker,
"Cycle breaker is not AudioNodeTrack?");
GraphTime t = mProcessedTime;
while (t < mStateComputedTime) {
GraphTime next = RoundUpToNextAudioBlock(t);
while (mProcessedTime < mStateComputedTime) {
GraphTime next = RoundUpToNextAudioBlock(mProcessedTime);
for (uint32_t i = mFirstCycleBreaker; i < mTracks.Length(); ++i) {
auto nt = static_cast<AudioNodeTrack*>(mTracks[i]);
MOZ_ASSERT(nt->AsAudioNodeTrack());
nt->ProduceOutputBeforeInput(t);
nt->ProduceOutputBeforeInput(mProcessedTime);
}
for (uint32_t i = aTrackIndex; i < mTracks.Length(); ++i) {
ProcessedMediaTrack* pt = mTracks[i]->AsProcessedTrack();
if (pt) {
pt->ProcessInput(
t, next,
mProcessedTime, next,
(next == mStateComputedTime) ? ProcessedMediaTrack::ALLOW_END : 0);
}
}
t = next;
mProcessedTime = next;
}
NS_ASSERTION(t == mStateComputedTime,
NS_ASSERTION(mProcessedTime == mStateComputedTime,
"Something went wrong with rounding to block boundaries");
}
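// An illustrative sketch of the block-by-block advance above: mProcessedTime
// now moves forward in fixed audio blocks until it reaches the computed state
// time. The 128-frame block size matches the Web Audio render quantum; the
// callback stands in for ProcessInput.
#include <cstdint>
#include <functional>

void ProcessBlockByBlockSketch(
    int64_t aProcessedTime, int64_t aStateComputedTime,
    const std::function<void(int64_t, int64_t)>& aProcess) {
  const int64_t kBlockSize = 128;  // Web Audio render quantum
  while (aProcessedTime < aStateComputedTime) {
    // Round up to the next block boundary (clamped here for safety; in the
    // graph, mStateComputedTime is already block-aligned).
    int64_t next = (aProcessedTime / kBlockSize + 1) * kBlockSize;
    if (next > aStateComputedTime) {
      next = aStateComputedTime;
    }
    aProcess(aProcessedTime, next);
    aProcessedTime = next;
  }
}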
@ -1183,9 +1215,7 @@ void MediaTrackGraphImpl::Process() {
bool allBlockedForever = true;
// True when we've done ProcessInput for all processed tracks.
bool doneAllProducing = false;
// This is the number of frames that are written to the AudioStreams for
// this cycle.
TrackTime ticksPlayed = 0;
const GraphTime oldProcessedTime = mProcessedTime;
mMixer.StartMixing();
@ -1223,23 +1253,29 @@ void MediaTrackGraphImpl::Process() {
}
}
}
// Only playback audio and video in real-time mode
if (mRealtime) {
CreateOrDestroyAudioTracks(track);
if (CurrentDriver()->AsAudioCallbackDriver()) {
TrackTime ticksPlayedForThisTrack = PlayAudio(track);
if (!ticksPlayed) {
if (track->mStartBlocking > oldProcessedTime) {
allBlockedForever = false;
}
}
mProcessedTime = mStateComputedTime;
// This is the number of frames that are written to the output buffer, for
// this iteration.
TrackTime ticksPlayed = 0;
// Only playback audio and video in real-time mode
if (mRealtime) {
if (CurrentDriver()->AsAudioCallbackDriver()) {
for (auto& t : mAudioOutputs) {
TrackTime ticksPlayedForThisTrack = PlayAudio(t, oldProcessedTime);
if (ticksPlayed == 0) {
ticksPlayed = ticksPlayedForThisTrack;
} else {
MOZ_ASSERT(!ticksPlayedForThisTrack ||
ticksPlayedForThisTrack == ticksPlayed,
"Each track should have the same number of frame.");
"Each track should have the same number of frames.");
}
}
}
if (track->mStartBlocking > mProcessedTime) {
allBlockedForever = false;
}
}
if (CurrentDriver()->AsAudioCallbackDriver()) {
@ -1249,7 +1285,7 @@ void MediaTrackGraphImpl::Process() {
// been processed. (bug 1406027)
mMixer.Mix(nullptr,
CurrentDriver()->AsAudioCallbackDriver()->OutputChannelCount(),
mStateComputedTime - mProcessedTime, mSampleRate);
mStateComputedTime - oldProcessedTime, mSampleRate);
}
mMixer.FinishMixing();
}
@ -1307,15 +1343,20 @@ bool MediaTrackGraphImpl::OneIterationImpl(GraphTime aStateEnd) {
// Process graph message from the main thread for this iteration.
RunMessagesInQueue();
// Process MessagePort events.
// These require a single thread, which has an nsThread with an event queue.
if (mGraphRunner || !mRealtime) {
NS_ProcessPendingEvents(nullptr);
}
GraphTime stateEnd = std::min(aStateEnd, GraphTime(mEndTime));
UpdateGraph(stateEnd);
mStateComputedTime = stateEnd;
Process();
GraphTime oldProcessedTime = mProcessedTime;
mProcessedTime = stateEnd;
Process();
MOZ_ASSERT(mProcessedTime == stateEnd);
UpdateCurrentTimeForTracks(oldProcessedTime);
@ -1431,7 +1472,9 @@ class MediaTrackGraphShutDownRunnable : public Runnable {
public:
explicit MediaTrackGraphShutDownRunnable(MediaTrackGraphImpl* aGraph)
: Runnable("MediaTrackGraphShutDownRunnable"), mGraph(aGraph) {}
NS_IMETHOD Run() override {
// MOZ_CAN_RUN_SCRIPT_BOUNDARY until Runnable::Run is MOZ_CAN_RUN_SCRIPT.
// See bug 1535398.
MOZ_CAN_RUN_SCRIPT_BOUNDARY NS_IMETHOD Run() override {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mGraph->mDetectedNotRunning && mGraph->mDriver,
"We should know the graph thread control loop isn't running!");
@ -1450,12 +1493,12 @@ class MediaTrackGraphShutDownRunnable : public Runnable {
#endif
if (mGraph->mGraphRunner) {
mGraph->mGraphRunner->Shutdown();
RefPtr<GraphRunner>(mGraph->mGraphRunner)->Shutdown();
}
mGraph->mDriver
->Shutdown(); // This will wait until it's shutdown since
// we'll start tearing down the graph after this
// This will wait until it's shutdown since
// we'll start tearing down the graph after this
RefPtr<GraphDriver>(mGraph->mDriver)->Shutdown();
// Release the driver now so that an AudioCallbackDriver will release its
// SharedThreadPool reference. Each SharedThreadPool reference must be
@ -1798,13 +1841,10 @@ size_t MediaTrack::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
// Future:
// - mLastPlayedVideoFrame
// - mTrackListeners - elements
// - mAudioOutputStream - elements
amount += mAudioOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mTrackListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mMainThreadListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mConsumers.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += aMallocSizeOf(mAudioOutputStream.get());
return amount;
}
@ -1939,14 +1979,20 @@ void MediaTrack::AddAudioOutput(void* aKey) {
GraphImpl()->AppendMessage(MakeUnique<Message>(this, aKey));
}
void MediaTrack::SetAudioOutputVolumeImpl(void* aKey, float aVolume) {
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
if (mAudioOutputs[i].mKey == aKey) {
mAudioOutputs[i].mVolume = aVolume;
void MediaTrackGraphImpl::SetAudioOutputVolume(MediaTrack* aTrack, void* aKey,
float aVolume) {
for (auto& tkv : mAudioOutputs) {
if (tkv.mKey == aKey && aTrack == tkv.mTrack) {
tkv.mVolume = aVolume;
return;
}
}
NS_ERROR("Audio output key not found");
MOZ_CRASH("Audio stream key not found when setting the volume.");
}
void MediaTrack::SetAudioOutputVolumeImpl(void* aKey, float aVolume) {
MOZ_ASSERT(GraphImpl()->OnGraphThread());
GraphImpl()->SetAudioOutputVolume(this, aKey, aVolume);
}
void MediaTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
@ -1965,27 +2011,19 @@ void MediaTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
}
void MediaTrack::AddAudioOutputImpl(void* aKey) {
LOG(LogLevel::Info,
("MediaTrack %p Adding AudioOutput for key %p", this, aKey));
mAudioOutputs.AppendElement(AudioOutput(aKey));
LOG(LogLevel::Info, ("MediaTrack %p adding AudioOutput", this));
GraphImpl()->RegisterAudioOutput(this, aKey);
}
void MediaTrack::RemoveAudioOutputImpl(void* aKey) {
LOG(LogLevel::Info,
("MediaTrack %p Removing AudioOutput for key %p", this, aKey));
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
if (mAudioOutputs[i].mKey == aKey) {
mAudioOutputs.RemoveElementAt(i);
return;
}
}
NS_ERROR("Audio output key not found");
LOG(LogLevel::Info, ("MediaTrack %p removing AudioOutput", this));
GraphImpl()->UnregisterAudioOutput(this, aKey);
}
void MediaTrack::RemoveAudioOutput(void* aKey) {
class Message : public ControlMessage {
public:
Message(MediaTrack* aTrack, void* aKey)
explicit Message(MediaTrack* aTrack, void* aKey)
: ControlMessage(aTrack), mKey(aKey) {}
void Run() override { mTrack->RemoveAudioOutputImpl(mKey); }
void* mKey;
@ -2397,8 +2435,17 @@ static void MoveToSegment(SourceMediaTrack* aTrack, MediaSegment* aIn,
TrackTime aDesiredUpToTime) {
MOZ_ASSERT(aIn->GetType() == aOut->GetType());
MOZ_ASSERT(aOut->GetDuration() >= aCurrentTime);
MOZ_ASSERT(aDesiredUpToTime >= aCurrentTime);
if (aIn->GetType() == MediaSegment::AUDIO) {
aOut->AppendFrom(aIn);
AudioSegment* in = static_cast<AudioSegment*>(aIn);
AudioSegment* out = static_cast<AudioSegment*>(aOut);
TrackTime desiredDurationToMove = aDesiredUpToTime - aCurrentTime;
TrackTime end = std::min(in->GetDuration(), desiredDurationToMove);
out->AppendSlice(*in, 0, end);
in->RemoveLeading(end);
out->ApplyVolume(aTrack->GetVolumeLocked());
} else {
VideoSegment* in = static_cast<VideoSegment*>(aIn);
VideoSegment* out = static_cast<VideoSegment*>(aOut);
@ -2440,8 +2487,8 @@ static void MoveToSegment(SourceMediaTrack* aTrack, MediaSegment* aIn,
out->ExtendLastFrameBy(aDesiredUpToTime - out->GetDuration());
}
in->Clear();
MOZ_ASSERT(aIn->GetDuration() == 0, "aIn must be consumed");
}
MOZ_ASSERT(aIn->GetDuration() == 0, "aIn must be consumed");
}
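// A standalone sketch of the new audio branch above, with plain vectors in
// place of AudioSegment (names illustrative). It moves at most the desired
// duration, leaves the remainder buffered in the input, and applies the
// track volume on the way out, which is how SourceMediaTrack::SetVolume
// avoids the buffering delay mentioned in bug 1443511.
#include <algorithm>
#include <cstdint>
#include <vector>

void MoveAudioSketch(std::vector<float>& aIn, std::vector<float>& aOut,
                     int64_t aDesiredDuration, float aVolume) {
  int64_t end = std::min<int64_t>(int64_t(aIn.size()), aDesiredDuration);
  for (int64_t i = 0; i < end; ++i) {
    aOut.push_back(aIn[i] * aVolume);         // AppendSlice + ApplyVolume
  }
  aIn.erase(aIn.begin(), aIn.begin() + end);  // RemoveLeading
}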
void SourceMediaTrack::ExtractPendingInput(GraphTime aCurrentTime,
@ -2486,23 +2533,9 @@ void SourceMediaTrack::ResampleAudioToGraphSampleRate(MediaSegment* aSegment) {
return;
}
AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
int channels = segment->ChannelCount();
// If this segment is just silence, we delay instantiating the resampler. We
// also need to recreate the resampler if the channel count or input rate
// changes.
if (channels && mUpdateTrack->mResamplerChannelCount != channels) {
SpeexResamplerState* state = speex_resampler_init(
channels, mUpdateTrack->mInputRate, GraphImpl()->GraphRate(),
SPEEX_RESAMPLER_QUALITY_MIN, nullptr);
if (!state) {
return;
}
mUpdateTrack->mResampler.own(state);
mUpdateTrack->mResamplerChannelCount = channels;
}
segment->ResampleChunks(mUpdateTrack->mResampler, mUpdateTrack->mInputRate,
GraphImpl()->GraphRate());
segment->ResampleChunks(mUpdateTrack->mResampler,
&mUpdateTrack->mResamplerChannelCount,
mUpdateTrack->mInputRate, GraphImpl()->GraphRate());
}
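// The logic removed here moved into ResampleChunks, which now receives the
// channel-count slot directly. A hedged sketch of that lazy policy: no
// resampler while the segment is silent (zero channels), and a rebuild when
// the channel count changes. The speex_resampler_* calls are the real
// libspeexdsp API; the wrapper function and its parameters are illustrative.
#include <cstdint>
#include <speex/speex_resampler.h>

SpeexResamplerState* EnsureResamplerSketch(SpeexResamplerState* aCurrent,
                                           uint32_t* aCurrentChannels,
                                           uint32_t aChannels,
                                           uint32_t aInRate,
                                           uint32_t aOutRate) {
  if (aChannels == 0) {
    return aCurrent;  // all-silence segment: keep delaying instantiation
  }
  if (aCurrent && *aCurrentChannels == aChannels) {
    return aCurrent;  // channel count unchanged: reuse
  }
  if (aCurrent) {
    speex_resampler_destroy(aCurrent);  // rebuild on channel-count change
  }
  aCurrent = speex_resampler_init(aChannels, aInRate, aOutRate,
                                  SPEEX_RESAMPLER_QUALITY_MIN, nullptr);
  *aCurrentChannels = aChannels;
  return aCurrent;
}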
void SourceMediaTrack::AdvanceTimeVaryingValuesToCurrentTime(
@ -2700,6 +2733,16 @@ void SourceMediaTrack::RemoveAllDirectListenersImpl() {
mDirectTrackListeners.Clear();
}
void SourceMediaTrack::SetVolume(float aVolume) {
MutexAutoLock lock(mMutex);
mVolume = aVolume;
}
float SourceMediaTrack::GetVolumeLocked() {
mMutex.AssertCurrentThreadOwns();
return mVolume;
}
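// A reduced sketch of the locking contract above, with std::mutex standing
// in for mozilla::Mutex. GetVolumeLocked's "caller already holds the mutex"
// assertion is simplified here to taking the lock directly.
#include <mutex>

class VolumeHolderSketch {
 public:
  void SetVolume(float aVolume) {
    std::lock_guard<std::mutex> lock(mMutex);
    mVolume = aVolume;
  }
  float GetVolume() {
    std::lock_guard<std::mutex> lock(mMutex);
    return mVolume;
  }

 private:
  std::mutex mMutex;
  float mVolume = 1.0f;  // protected by mMutex
};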
SourceMediaTrack::~SourceMediaTrack() {}
void MediaInputPort::Init() {
@ -2855,8 +2898,9 @@ MediaTrackGraphImpl::MediaTrackGraphImpl(GraphDriverType aDriverRequested,
uint32_t aChannelCount,
AbstractThread* aMainThread)
: MediaTrackGraph(aSampleRate),
mGraphRunner(aRunTypeRequested == SINGLE_THREAD ? new GraphRunner(this)
: nullptr),
mGraphRunner(aRunTypeRequested == SINGLE_THREAD
? GraphRunner::Create(this)
: already_AddRefed<GraphRunner>(nullptr)),
mFirstCycleBreaker(0)
// An offline graph is not initially processing.
,
@ -2876,7 +2920,6 @@ MediaTrackGraphImpl::MediaTrackGraphImpl(GraphDriverType aDriverRequested,
mTrackOrderDirty(false),
mAbstractMainThread(aMainThread),
mSelfRef(this),
mOutputChannels(aChannelCount),
mGlobalVolume(CubebUtils::GetVolumeScale())
#ifdef DEBUG
,
@ -2885,11 +2928,21 @@ MediaTrackGraphImpl::MediaTrackGraphImpl(GraphDriverType aDriverRequested,
,
mMainThreadGraphTime(0, "MediaTrackGraphImpl::mMainThreadGraphTime"),
mAudioOutputLatency(0.0) {
if (aRunTypeRequested == SINGLE_THREAD && !mGraphRunner) {
// Failed to create thread. Jump to the last phase of the lifecycle.
mDetectedNotRunning = true;
mLifecycleState = LIFECYCLE_WAITING_FOR_TRACK_DESTRUCTION;
#ifdef DEBUG
mCanRunMessagesSynchronously = true;
#endif
return;
}
if (mRealtime) {
if (aDriverRequested == AUDIO_THREAD_DRIVER) {
// Always start with zero input channels, and no particular preferences
// for the input channel.
mDriver = new AudioCallbackDriver(this, 0, AudioInputType::Unknown);
mDriver = new AudioCallbackDriver(this, aChannelCount, 0,
AudioInputType::Unknown);
} else {
mDriver = new SystemClockDriver(this);
}
@ -2902,6 +2955,10 @@ MediaTrackGraphImpl::MediaTrackGraphImpl(GraphDriverType aDriverRequested,
StartAudioCallbackTracing();
RegisterWeakAsyncMemoryReporter(this);
if (!IsNonRealtime()) {
AddShutdownBlocker();
}
}
AbstractThread* MediaTrackGraph::AbstractMainThread() {
@ -2984,10 +3041,6 @@ MediaTrackGraph* MediaTrackGraph::GetInstance(
graph = new MediaTrackGraphImpl(aGraphDriverRequested, runType, sampleRate,
channelCount, mainThread);
if (!graph->IsNonRealtime()) {
graph->AddShutdownBlocker();
}
uint32_t hashkey = WindowToHash(aWindow, sampleRate);
gGraphs.Put(hashkey, graph);
@ -3380,7 +3433,8 @@ void MediaTrackGraphImpl::ApplyAudioContextOperationImpl(
MOZ_ASSERT(nextDriver->AsAudioCallbackDriver());
driver = nextDriver->AsAudioCallbackDriver();
} else {
driver = new AudioCallbackDriver(this, AudioInputChannelCount(),
driver = new AudioCallbackDriver(this, AudioOutputChannelCount(),
AudioInputChannelCount(),
AudioInputDevicePreference());
Monitor2AutoLock lock(mMonitor);
CurrentDriver()->SwitchAtNextIteration(driver);
@ -3483,6 +3537,33 @@ void MediaTrackGraph::ApplyAudioContextOperation(
aDestinationTrack, aTracks, aOperation, aPromise, aFlags));
}
uint32_t MediaTrackGraphImpl::AudioOutputChannelCount() const {
MOZ_ASSERT(OnGraphThread());
// The audio output channel count for a graph is the maximum of the output
// channel count of all the tracks that are in mAudioOutputs.
uint32_t channelCount = 0;
for (auto& tkv : mAudioOutputs) {
MediaTrack* t = tkv.mTrack;
// This is an AudioDestinationNode
if (t->AsAudioNodeTrack()) {
channelCount = std::max<uint32_t>(
channelCount, t->AsAudioNodeTrack()->NumberOfChannels());
} else if (t->GetData<AudioSegment>()) {
AudioSegment* segment = t->GetData<AudioSegment>();
channelCount =
std::max<uint32_t>(channelCount, segment->MaxChannelCount());
}
}
if (channelCount) {
return channelCount;
} else {
if (CurrentDriver()->AsAudioCallbackDriver()) {
return CurrentDriver()->AsAudioCallbackDriver()->OutputChannelCount();
}
return 2;
}
}
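// The policy above, as a self-contained function. Inputs are reduced to a
// list of per-output channel counts; the fallback mirrors the code: keep the
// current driver's count if one exists, otherwise default to stereo.
#include <algorithm>
#include <cstdint>
#include <vector>

uint32_t ComputeOutputChannelsSketch(
    const std::vector<uint32_t>& aTrackChannelCounts,
    uint32_t aDriverChannels /* 0 when no audio callback driver */) {
  uint32_t channels = 0;
  for (uint32_t c : aTrackChannelCounts) {
    channels = std::max(channels, c);  // max across all audio outputs
  }
  if (channels) {
    return channels;
  }
  return aDriverChannels ? aDriverChannels : 2;  // sensible stereo default
}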
double MediaTrackGraph::AudioOutputLatency() {
return static_cast<MediaTrackGraphImpl*>(this)->AudioOutputLatency();
}
@ -3621,4 +3702,9 @@ Watchable<mozilla::GraphTime>& MediaTrackGraphImpl::CurrentTime() {
return mMainThreadGraphTime;
}
GraphTime MediaTrackGraph::ProcessedTime() const {
AssertOnGraphThreadOrNotRunning();
return static_cast<const MediaTrackGraphImpl*>(this)->mProcessedTime;
}
} // namespace mozilla

View File

@ -5,7 +5,8 @@
#ifndef MOZILLA_MEDIATRACKGRAPH_H_
#define MOZILLA_MEDIATRACKGRAPH_H_
#include "AudioStream.h"
#include "AudioSampleFormat.h"
#include "CubebUtils.h"
#include "MainThreadUtils.h"
#include "MediaSegment.h"
#include "mozilla/LinkedList.h"
@ -302,12 +303,6 @@ class MediaTrack : public mozilla::LinkedListElement<MediaTrack> {
void SetGraphImpl(MediaTrackGraph* aGraph);
// Control API.
// Since a track can be played multiple ways, we need to combine independent
// volume settings. The aKey parameter is used to keep volume settings
// separate. Since the track is always playing the same contents, only
// a single audio output track is used; the volumes are combined.
// Currently only the first enabled audio track is played.
// XXX change this so all enabled audio tracks are mixed and played.
virtual void AddAudioOutput(void* aKey);
virtual void SetAudioOutputVolume(void* aKey, float aVolume);
virtual void RemoveAudioOutput(void* aKey);
@ -556,12 +551,6 @@ class MediaTrack : public mozilla::LinkedListElement<MediaTrack> {
bool mNotifiedEnded;
// Client-set volume of this track
struct AudioOutput {
explicit AudioOutput(void* aKey) : mKey(aKey), mVolume(1.0f) {}
void* mKey;
float mVolume;
};
nsTArray<AudioOutput> mAudioOutputs;
nsTArray<RefPtr<MediaTrackListener>> mTrackListeners;
nsTArray<MainThreadMediaTrackListener*> mMainThreadListeners;
// This track's associated disabled mode. It can either by disabled by frames
@ -577,20 +566,6 @@ class MediaTrack : public mozilla::LinkedListElement<MediaTrack> {
// MediaInputPorts to which this is connected
nsTArray<MediaInputPort*> mConsumers;
// Where audio output is going. There is one AudioOutputStream per
// Type::AUDIO MediaTrack.
struct AudioOutputStream {
// When we started audio playback for this track.
// Add mTrack->GetPosition() to find the current audio playback position.
GraphTime mAudioPlaybackStartTime;
// Amount of time that we've wanted to play silence because of the track
// blocking.
MediaTime mBlockedAudioTime;
// Last tick written to the audio output.
TrackTime mLastTickWritten;
};
UniquePtr<AudioOutputStream> mAudioOutputStream;
/**
* Number of outstanding suspend operations on this track. Track is
* suspended when this is > 0.
@ -706,6 +681,11 @@ class SourceMediaTrack : public MediaTrack {
void RemoveAllDirectListenersImpl() override;
// The value set here is applied in MoveToSegment so we can avoid the
// buffering delay in applying the change. See Bug 1443511.
void SetVolume(float aVolume);
float GetVolumeLocked();
friend class MediaTrackGraphImpl;
protected:
@ -722,7 +702,7 @@ class SourceMediaTrack : public MediaTrack {
// Resampler if the rate of the input track does not match the
// MediaTrackGraph's.
nsAutoRef<SpeexResamplerState> mResampler;
int mResamplerChannelCount;
uint32_t mResamplerChannelCount;
// Each time the track updates are flushed to the media graph thread,
// the segment buffer is emptied.
UniquePtr<MediaSegment> mData;
@ -761,6 +741,7 @@ class SourceMediaTrack : public MediaTrack {
// held together.
Mutex mMutex;
// protected by mMutex
float mVolume = 1.0;
UniquePtr<TrackData> mUpdateTrack;
nsTArray<RefPtr<DirectMediaTrackListener>> mDirectTrackListeners;
};
@ -1058,6 +1039,7 @@ class MediaTrackGraph {
AudioDataListener* aListener) = 0;
virtual void CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID,
AudioDataListener* aListener) = 0;
// Control API.
/**
* Create a track that a media decoder (or some other source of
@ -1147,6 +1129,12 @@ class MediaTrackGraph {
*/
virtual Watchable<GraphTime>& CurrentTime() = 0;
/**
* Graph thread function to return the time at which all processing has been
* completed. Some tracks may have performed processing beyond this time.
*/
GraphTime ProcessedTime() const;
protected:
explicit MediaTrackGraph(TrackRate aSampleRate) : mSampleRate(aSampleRate) {
MOZ_COUNT_CTOR(MediaTrackGraph);

View File

@ -385,7 +385,13 @@ class MediaTrackGraphImpl : public MediaTrackGraph,
* Queue audio (mix of track audio and silence for blocked intervals)
* to the audio output track. Returns the number of frames played.
*/
TrackTime PlayAudio(MediaTrack* aTrack);
struct TrackKeyAndVolume {
MediaTrack* mTrack;
void* mKey;
float mVolume;
};
TrackTime PlayAudio(const TrackKeyAndVolume& aTkv, GraphTime aPlayedTime);
/* Runs off a message on the graph thread when something requests audio from
* an input audio device of ID aID, and delivers the input audio frames to
* aListener. */
@ -405,6 +411,14 @@ class MediaTrackGraphImpl : public MediaTrackGraph,
* audio from this audio input device. */
virtual void CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID,
AudioDataListener* aListener) override;
/* Add or remove an audio output for this track. All tracks that have an
* audio output are mixed and written to a single audio output stream. */
void RegisterAudioOutput(MediaTrack* aTrack, void* aKey);
void UnregisterAudioOutput(MediaTrack* aTrack, void* aKey);
void UnregisterAllAudioOutputs(MediaTrack* aTrack);
void SetAudioOutputVolume(MediaTrack* aTrack, void* aKey, float aVolume);
/* Called on the graph thread when the input device settings should be
* reevaluated, for example, if the channel count of the input track should
* be changed. */
@ -466,7 +480,7 @@ class MediaTrackGraphImpl : public MediaTrackGraph,
mTrackOrderDirty = true;
}
uint32_t AudioOutputChannelCount() const { return mOutputChannels; }
uint32_t AudioOutputChannelCount() const;
double AudioOutputLatency();
@ -660,7 +674,7 @@ class MediaTrackGraphImpl : public MediaTrackGraph,
* If set, the GraphRunner class handles handing over data from audio
* callbacks to a common single thread, shared across GraphDrivers.
*/
const UniquePtr<GraphRunner> mGraphRunner;
const RefPtr<GraphRunner> mGraphRunner;
/**
* Main-thread view of the number of tracks in this graph, for lifetime
@ -954,11 +968,11 @@ class MediaTrackGraphImpl : public MediaTrackGraph,
* Track for window audio capture.
*/
nsTArray<WindowAndTrack> mWindowCaptureTracks;
/**
* Number of channels on output.
* Tracks that have their audio output mixed and written to an audio output
* device.
*/
const uint32_t mOutputChannels;
nsTArray<TrackKeyAndVolume> mAudioOutputs;
/**
* Global volume scale. Used when running tests so that the output is not too

View File

@ -91,7 +91,7 @@ class MediaTrackListener {
/**
* Notify that this track listener has been removed from the graph, either
* after shutdown or RemoveTrackListener.
* after shutdown or through MediaTrack::RemoveListener().
*/
virtual void NotifyRemoved(MediaTrackGraph* aGraph) {}

View File

@ -3,8 +3,13 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VideoFrameContainer.h"
#include "mozilla/Telemetry.h"
#ifdef MOZ_WIDGET_ANDROID
#include "GLImages.h" // for SurfaceTextureImage
#endif
#include "MediaDecoderOwner.h"
#include "mozilla/Telemetry.h"
#include "mozilla/AbstractThread.h"
using namespace mozilla::layers;
@ -76,9 +81,27 @@ void VideoFrameContainer::UpdatePrincipalHandleForFrameIDLocked(
mFrameIDForPendingPrincipalHandle = aFrameID;
}
#ifdef MOZ_WIDGET_ANDROID
static void NotifySetCurrent(Image* aImage) {
if (aImage == nullptr) {
return;
}
SurfaceTextureImage* image = aImage->AsSurfaceTextureImage();
if (image == nullptr) {
return;
}
image->OnSetCurrent();
}
#endif
void VideoFrameContainer::SetCurrentFrame(const gfx::IntSize& aIntrinsicSize,
Image* aImage,
const TimeStamp& aTargetTime) {
#ifdef MOZ_WIDGET_ANDROID
NotifySetCurrent(aImage);
#endif
if (aImage) {
MutexAutoLock lock(mMutex);
AutoTArray<ImageContainer::NonOwningImage, 1> imageList;
@ -93,6 +116,15 @@ void VideoFrameContainer::SetCurrentFrame(const gfx::IntSize& aIntrinsicSize,
void VideoFrameContainer::SetCurrentFrames(
const gfx::IntSize& aIntrinsicSize,
const nsTArray<ImageContainer::NonOwningImage>& aImages) {
#ifdef MOZ_WIDGET_ANDROID
// When there are multiple frames, only the last one is effective
// (see bug 1299068 comment 4). Here we rely on VideoSink and VideoOutput
// to send one frame at a time, and warn if they do not.
Unused << NS_WARN_IF(aImages.Length() > 1);
for (auto& image : aImages) {
NotifySetCurrent(image.mImage);
}
#endif
MutexAutoLock lock(mMutex);
SetCurrentFramesLocked(aIntrinsicSize, aImages);
}

View File

@ -6,7 +6,7 @@
#define ContainerWriter_h_
#include "nsTArray.h"
#include "EncodedFrameContainer.h"
#include "EncodedFrame.h"
#include "TrackMetadataBase.h"
namespace mozilla {
@ -25,23 +25,26 @@ class ContainerWriter {
enum { END_OF_STREAM = 1 << 0 };
/**
* Writes encoded track data from aBuffer to a packet, and inserts this packet
* into the container writer's internal stream. aDuration is the playback
* duration of this packet in number of samples. aFlags contains
* END_OF_STREAM if this is the last packet of the track.
* Currently, WriteEncodedTrack doesn't support multiple tracks.
* Writes encoded track data from aData into the container writer's internal
* stream. aFlags is used to signal different conditions to the implementation,
* such as END_OF_STREAM. Each implementation may handle different flags, and
* these should be documented accordingly. Currently, WriteEncodedTrack doesn't
* support explicit track specification, though each implementation may provide
* logic to allocate frames to different tracks.
*/
virtual nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags = 0) = 0;
virtual nsresult WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags = 0) = 0;
/**
* Sets the metadata pointer into the muxer.
* This function will check the integrity of aMetadata.
* If the metadata isn't well formatted, this function will return
* NS_ERROR_FAILURE to the caller, else save the pointer to mMetadata and return
* Stores the metadata for all given tracks to the muxer.
*
* This method checks the integrity of aMetadata.
* If the metadata isn't well formatted, this method returns NS_ERROR_FAILURE.
* If the metadata is well formatted, it stores the metadata and returns
* NS_OK.
*/
virtual nsresult SetMetadata(TrackMetadataBase* aMetadata) = 0;
virtual nsresult SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) = 0;
/**
* Indicates whether the writer has finished outputting data.
@ -58,7 +61,7 @@ class ContainerWriter {
* even if it is not full, and copies the container data into buffers that
* are appended to aOutputBufs.
*/
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags = 0) = 0;
protected:
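// A standalone rendering of the writer contract documented above, with std
// containers standing in for nsTArray/RefPtr and the flag handling reduced
// to END_OF_STREAM. This is a sketch of the call order a muxer would use,
// not the Gecko interface itself.
#include <cstdint>
#include <vector>

struct FrameSketch {
  std::vector<uint8_t> mData;
};

class SimpleWriterSketch {
 public:
  static constexpr uint32_t END_OF_STREAM = 1 << 0;

  // Buffer the encoded frames; remember whether the stream has ended.
  void WriteEncodedTrack(const std::vector<FrameSketch>& aFrames,
                         uint32_t aFlags) {
    for (const FrameSketch& f : aFrames) {
      mPending.insert(mPending.end(), f.mData.begin(), f.mData.end());
    }
    mEnded = (aFlags & END_OF_STREAM) != 0;
  }

  // Hand buffered container data to the caller, one buffer per call.
  void GetContainerData(std::vector<std::vector<uint8_t>>* aOutputBufs) {
    if (!mPending.empty()) {
      aOutputBufs->push_back(mPending);
      mPending.clear();
    }
  }

  bool IsWritingComplete() const { return mEnded && mPending.empty(); }

 private:
  std::vector<uint8_t> mPending;
  bool mEnded = false;
};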

View File

@ -0,0 +1,70 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrame_h_
#define EncodedFrame_h_
#include "nsISupportsImpl.h"
#include "VideoUtils.h"
namespace mozilla {
// Represent an encoded frame emitted by an encoder
class EncodedFrame final {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
public:
EncodedFrame() : mTime(0), mDuration(0), mFrameType(UNKNOWN) {}
enum FrameType {
VP8_I_FRAME, // VP8 intraframe
VP8_P_FRAME, // VP8 predicted frame
OPUS_AUDIO_FRAME, // Opus audio frame
UNKNOWN // FrameType not set
};
void SwapInFrameData(nsTArray<uint8_t>& aData) {
mFrameData.SwapElements(aData);
}
nsresult SwapOutFrameData(nsTArray<uint8_t>& aData) {
if (mFrameType != UNKNOWN) {
// Reset this frame type to UNKNOWN once the data is swapped out.
mFrameData.SwapElements(aData);
mFrameType = UNKNOWN;
return NS_OK;
}
return NS_ERROR_FAILURE;
}
const nsTArray<uint8_t>& GetFrameData() const { return mFrameData; }
// Timestamp in microseconds
uint64_t mTime;
// The playback duration of this packet. The unit is determined by the use
// case. For VP8 the unit should be microseconds. For Opus this is the number
// of samples.
uint64_t mDuration;
// Represent what is in the FrameData
FrameType mFrameType;
uint64_t GetEndTime() const {
// Defend against untested types. This assert can be removed but we want
// to make sure other types are correctly accounted for.
MOZ_ASSERT(mFrameType == OPUS_AUDIO_FRAME || mFrameType == VP8_I_FRAME ||
mFrameType == VP8_P_FRAME);
if (mFrameType == OPUS_AUDIO_FRAME) {
// See bug 1356054 for discussion around standardization of time units
// (the VideoUtils import can be removed when that is resolved)
return mTime + FramesToUsecs(mDuration, 48000).value();
} else {
return mTime + mDuration;
}
}
private:
// Private destructor, to discourage deletion outside of Release():
~EncodedFrame() {}
// Encoded data
nsTArray<uint8_t> mFrameData;
};
} // namespace mozilla
#endif // EncodedFrame_h_
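// The GetEndTime() arithmetic above, worked as a standalone example. For
// Opus, mDuration is a sample count at 48 kHz, so the conversion is
// samples * 1,000,000 / 48,000 microseconds; the other frame types already
// carry microseconds. Names here are illustrative.
#include <cstdint>

uint64_t EndTimeUsecsSketch(uint64_t aTimeUs, uint64_t aDuration,
                            bool aIsOpus) {
  if (aIsOpus) {
    return aTimeUs + aDuration * 1000000 / 48000;  // samples -> microseconds
  }
  return aTimeUs + aDuration;  // duration is already in microseconds
}

// For example, a 20 ms Opus packet is 960 samples:
// EndTimeUsecsSketch(0, 960, true) == 20000.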

View File

@ -1,96 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrameContainer_H_
#define EncodedFrameContainer_H_
#include "nsTArray.h"
namespace mozilla {
class EncodedFrame;
/*
* This container is used to carry encoded video or audio data from the encoder
* to the muxer. The media data object is created by the encoder and recycled
* by the destructor. Only encoded audio or video data may be stored in
* EncodedData.
*/
class EncodedFrameContainer {
public:
// Append encoded frame data
void AppendEncodedFrame(EncodedFrame* aEncodedFrame) {
mEncodedFrames.AppendElement(aEncodedFrame);
}
// Retrieve all of the encoded frames
const nsTArray<RefPtr<EncodedFrame> >& GetEncodedFrames() const {
return mEncodedFrames;
}
private:
// This container is used to store the video or audio encoded packets.
// Muxer should check mFrameType and get the encoded data type from
// mEncodedFrames.
nsTArray<RefPtr<EncodedFrame> > mEncodedFrames;
};
// Represent one encoded frame
class EncodedFrame final {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
public:
EncodedFrame() : mTimeStamp(0), mDuration(0), mFrameType(UNKNOWN) {}
enum FrameType {
VP8_I_FRAME, // VP8 intraframe
VP8_P_FRAME, // VP8 predicted frame
OPUS_AUDIO_FRAME, // Opus audio frame
VORBIS_AUDIO_FRAME,
AVC_I_FRAME,
AVC_P_FRAME,
AVC_B_FRAME,
AVC_CSD, // AVC codec specific data
AAC_AUDIO_FRAME,
AAC_CSD, // AAC codec specific data
AMR_AUDIO_CSD,
AMR_AUDIO_FRAME,
EVRC_AUDIO_CSD,
EVRC_AUDIO_FRAME,
UNKNOWN // FrameType not set
};
void SwapInFrameData(nsTArray<uint8_t>& aData) {
mFrameData.SwapElements(aData);
}
nsresult SwapOutFrameData(nsTArray<uint8_t>& aData) {
if (mFrameType != UNKNOWN) {
// Reset this frame type to UNKNOWN once the data is swapped out.
mFrameData.SwapElements(aData);
mFrameType = UNKNOWN;
return NS_OK;
}
return NS_ERROR_FAILURE;
}
const nsTArray<uint8_t>& GetFrameData() const { return mFrameData; }
uint64_t GetTimeStamp() const { return mTimeStamp; }
void SetTimeStamp(uint64_t aTimeStamp) { mTimeStamp = aTimeStamp; }
uint64_t GetDuration() const { return mDuration; }
void SetDuration(uint64_t aDuration) { mDuration = aDuration; }
FrameType GetFrameType() const { return mFrameType; }
void SetFrameType(FrameType aFrameType) { mFrameType = aFrameType; }
private:
// Private destructor, to discourage deletion outside of Release():
~EncodedFrame() {}
// Encoded data
nsTArray<uint8_t> mFrameData;
uint64_t mTimeStamp;
// The playback duration of this packet in number of samples
uint64_t mDuration;
// Represent what is in the FrameData
FrameType mFrameType;
};
} // namespace mozilla
#endif

View File

@ -18,12 +18,12 @@
#include "mozilla/dom/VideoStreamTrack.h"
#include "mozilla/gfx/Point.h" // IntSize
#include "mozilla/Logging.h"
#include "mozilla/media/MediaUtils.h"
#include "mozilla/Preferences.h"
#include "mozilla/StaticPrefs_media.h"
#include "mozilla/StaticPtr.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/Unused.h"
#include "Muxer.h"
#include "nsMimeTypes.h"
#include "nsThreadUtils.h"
#include "OggWriter.h"
@ -36,10 +36,6 @@
# include "WebMWriter.h"
#endif
#ifdef LOG
# undef LOG
#endif
mozilla::LazyLogModule gMediaEncoderLog("MediaEncoder");
#define LOG(type, msg) MOZ_LOG(gMediaEncoderLog, type, msg)
@ -57,13 +53,12 @@ class MediaEncoder::AudioTrackListener : public DirectMediaTrackListener {
mRemoved(false),
mDriftCompensator(aDriftCompensator),
mEncoder(aEncoder),
mEncoderThread(aEncoderThread) {
mEncoderThread(aEncoderThread),
mShutdownPromise(mShutdownHolder.Ensure(__func__)) {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
}
void NotifyShutdown() { mShutdown = true; }
void NotifyDirectListenerInstalled(InstallationResult aResult) override {
if (aResult == InstallationResult::SUCCESS) {
LOG(LogLevel::Info, ("Audio track direct listener installed"));
@ -89,10 +84,6 @@ class MediaEncoder::AudioTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
if (mShutdown) {
return;
}
if (!mInitialized) {
mDriftCompensator->NotifyAudioStart(TimeStamp::Now());
mInitialized = true;
@ -117,10 +108,6 @@ class MediaEncoder::AudioTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
if (mShutdown) {
return;
}
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::AudioTrackEncoder::NotifyEndOfStream",
mEncoder, &AudioTrackEncoder::NotifyEndOfStream));
@ -129,13 +116,11 @@ class MediaEncoder::AudioTrackListener : public DirectMediaTrackListener {
}
void NotifyRemoved(MediaTrackGraph* aGraph) override {
if (!mShutdown) {
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::AudioTrackEncoder::NotifyEndOfStream",
mEncoder, &AudioTrackEncoder::NotifyEndOfStream));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
}
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::AudioTrackEncoder::NotifyEndOfStream",
mEncoder, &AudioTrackEncoder::NotifyEndOfStream));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
mRemoved = true;
@ -143,17 +128,23 @@ class MediaEncoder::AudioTrackListener : public DirectMediaTrackListener {
mEncoder = nullptr;
mEncoderThread = nullptr;
}
mShutdownHolder.Resolve(true, __func__);
}
const RefPtr<GenericNonExclusivePromise>& OnShutdown() const {
return mShutdownPromise;
}
private:
// True when MediaEncoder has shutdown and destroyed the TaskQueue.
Atomic<bool> mShutdown;
bool mDirectConnected;
bool mInitialized;
bool mRemoved;
const RefPtr<DriftCompensator> mDriftCompensator;
RefPtr<AudioTrackEncoder> mEncoder;
RefPtr<TaskQueue> mEncoderThread;
MozPromiseHolder<GenericNonExclusivePromise> mShutdownHolder;
const RefPtr<GenericNonExclusivePromise> mShutdownPromise;
};
class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
@ -163,13 +154,12 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
mInitialized(false),
mRemoved(false),
mEncoder(aEncoder),
mEncoderThread(aEncoderThread) {
mEncoderThread(aEncoderThread),
mShutdownPromise(mShutdownHolder.Ensure(__func__)) {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
}
void NotifyShutdown() { mShutdown = true; }
void NotifyDirectListenerInstalled(InstallationResult aResult) override {
if (aResult == InstallationResult::SUCCESS) {
LOG(LogLevel::Info, ("Video track direct listener installed"));
@ -196,10 +186,6 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
if (mShutdown) {
return;
}
const TimeStamp now = TimeStamp::Now();
if (!mInitialized) {
nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<TimeStamp>(
@ -224,10 +210,6 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoderThread);
MOZ_ASSERT(aMedia.GetType() == MediaSegment::VIDEO);
if (mShutdown) {
return;
}
const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
VideoSegment copy;
for (VideoSegment::ConstChunkIterator iter(video); !iter.IsEnded();
@ -251,10 +233,6 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
if (mShutdown) {
return;
}
nsresult rv;
if (aEnabled) {
rv = mEncoderThread->Dispatch(NewRunnableMethod<TimeStamp>(
@ -273,10 +251,6 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
MOZ_ASSERT(mEncoder);
MOZ_ASSERT(mEncoderThread);
if (mShutdown) {
return;
}
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::VideoTrackEncoder::NotifyEndOfStream",
mEncoder, &VideoTrackEncoder::NotifyEndOfStream));
@ -285,13 +259,11 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
}
void NotifyRemoved(MediaTrackGraph* aGraph) override {
if (!mShutdown) {
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::VideoTrackEncoder::NotifyEndOfStream",
mEncoder, &VideoTrackEncoder::NotifyEndOfStream));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
}
nsresult rv = mEncoderThread->Dispatch(
NewRunnableMethod("mozilla::VideoTrackEncoder::NotifyEndOfStream",
mEncoder, &VideoTrackEncoder::NotifyEndOfStream));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
mRemoved = true;
@ -299,16 +271,22 @@ class MediaEncoder::VideoTrackListener : public DirectMediaTrackListener {
mEncoder = nullptr;
mEncoderThread = nullptr;
}
mShutdownHolder.Resolve(true, __func__);
}
const RefPtr<GenericNonExclusivePromise>& OnShutdown() const {
return mShutdownPromise;
}
private:
// True when MediaEncoder has shutdown and destroyed the TaskQueue.
Atomic<bool> mShutdown;
bool mDirectConnected;
bool mInitialized;
bool mRemoved;
RefPtr<VideoTrackEncoder> mEncoder;
RefPtr<TaskQueue> mEncoderThread;
MozPromiseHolder<GenericNonExclusivePromise> mShutdownHolder;
const RefPtr<GenericNonExclusivePromise> mShutdownPromise;
};
class MediaEncoder::EncoderListener : public TrackEncoderListener {
@ -396,18 +374,15 @@ MediaEncoder::MediaEncoder(TaskQueue* aEncoderThread,
VideoTrackEncoder* aVideoEncoder,
TrackRate aTrackRate, const nsAString& aMIMEType)
: mEncoderThread(aEncoderThread),
mWriter(std::move(aWriter)),
mMuxer(MakeUnique<Muxer>(std::move(aWriter))),
mAudioEncoder(aAudioEncoder),
mVideoEncoder(aVideoEncoder),
mEncoderListener(MakeAndAddRef<EncoderListener>(mEncoderThread, this)),
mStartTime(TimeStamp::Now()),
mMIMEType(aMIMEType),
mInitialized(false),
mMetadataEncoded(false),
mCompleted(false),
mError(false),
mCanceled(false),
mShutdown(false) {
mError(false) {
if (mAudioEncoder) {
mAudioListener = MakeAndAddRef<AudioTrackListener>(
aDriftCompensator, mAudioEncoder, mEncoderThread);
@ -430,7 +405,14 @@ MediaEncoder::MediaEncoder(TaskQueue* aEncoderThread,
}
}
MediaEncoder::~MediaEncoder() { MOZ_ASSERT(mListeners.IsEmpty()); }
MediaEncoder::~MediaEncoder() {
MOZ_ASSERT(mListeners.IsEmpty());
MOZ_ASSERT(!mAudioTrack);
MOZ_ASSERT(!mVideoTrack);
MOZ_ASSERT(!mAudioNode);
MOZ_ASSERT(!mInputPort);
MOZ_ASSERT(!mPipeStream);
}
void MediaEncoder::EnsureGraphTrackFrom(MediaTrack* aTrack) {
if (mGraphTrack) {
@ -543,18 +525,16 @@ void MediaEncoder::ConnectMediaStreamTrack(MediaStreamTrack* aTrack) {
if (AudioStreamTrack* audio = aTrack->AsAudioStreamTrack()) {
if (!mAudioEncoder) {
MOZ_ASSERT(false, "No audio encoder for this audio track");
return;
}
if (mAudioTrack) {
MOZ_ASSERT(false, "Only one audio track supported.");
return;
}
if (!mAudioListener) {
MOZ_ASSERT(false, "No audio listener for this audio track");
// No audio encoder for this audio track. It could be disabled.
LOG(LogLevel::Warning, ("Cannot connect to audio track - no encoder"));
return;
}
MOZ_ASSERT(!mAudioTrack, "Only one audio track supported.");
MOZ_ASSERT(mAudioListener, "No audio listener for this audio track");
LOG(LogLevel::Info, ("Connected to audio track %p", aTrack));
mAudioTrack = audio;
// With full duplex we don't risk having audio come in late to the MTG
// so we won't need a direct listener.
@ -566,18 +546,16 @@ void MediaEncoder::ConnectMediaStreamTrack(MediaStreamTrack* aTrack) {
audio->AddListener(mAudioListener);
} else if (VideoStreamTrack* video = aTrack->AsVideoStreamTrack()) {
if (!mVideoEncoder) {
MOZ_ASSERT(false, "No video encoder for this video track");
return;
}
if (mVideoTrack) {
MOZ_ASSERT(false, "Only one video track supported.");
return;
}
if (!mVideoListener) {
MOZ_ASSERT(false, "No video listener for this audio track");
// No video encoder for this video track. It could be disabled.
LOG(LogLevel::Warning, ("Cannot connect to video track - no encoder"));
return;
}
MOZ_ASSERT(!mVideoTrack, "Only one video track supported.");
MOZ_ASSERT(mVideoListener, "No video listener for this video track");
LOG(LogLevel::Info, ("Connected to video track %p", aTrack));
mVideoTrack = video;
video->AddDirectListener(mVideoListener);
video->AddListener(mVideoListener);
@ -629,21 +607,18 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
RefPtr<VideoTrackEncoder> videoEncoder;
auto driftCompensator =
MakeRefPtr<DriftCompensator>(aEncoderThread, aTrackRate);
nsString mimeType;
if (!aTrackTypes) {
MOZ_ASSERT(false);
LOG(LogLevel::Error, ("No TrackTypes"));
Maybe<MediaContainerType> mimeType = MakeMediaContainerType(aMIMEType);
if (!mimeType) {
return nullptr;
}
#ifdef MOZ_WEBM_ENCODER
else if (MediaEncoder::IsWebMEncoderEnabled() &&
aMIMEType.EqualsLiteral(VIDEO_WEBM)) {
if (aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK &&
MediaDecoder::IsOpusEnabled()) {
for (const auto& codec : mimeType->ExtendedType().Codecs().Range()) {
if (codec.EqualsLiteral("opus")) {
MOZ_ASSERT(!audioEncoder);
audioEncoder = MakeAndAddRef<OpusTrackEncoder>(aTrackRate);
}
if (aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK) {
} else if (codec.EqualsLiteral("vp8") || codec.EqualsLiteral("vp8.0")) {
MOZ_ASSERT(!videoEncoder);
if (Preferences::GetBool("media.recorder.video.frame_drops", true)) {
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::ALLOW);
@ -651,75 +626,31 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::DISALLOW);
}
}
writer = MakeUnique<WebMWriter>(aTrackTypes);
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
} else if (MediaEncoder::IsWebMEncoderEnabled() &&
aMIMEType.EqualsLiteral(AUDIO_WEBM) &&
aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK) {
if (aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK &&
MediaDecoder::IsOpusEnabled()) {
audioEncoder = MakeAndAddRef<OpusTrackEncoder>(aTrackRate);
}
if (aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK) {
if (Preferences::GetBool("media.recorder.video.frame_drops", true)) {
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::ALLOW);
} else {
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::DISALLOW);
}
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
} else {
mimeType = NS_LITERAL_STRING(AUDIO_WEBM);
MOZ_CRASH("Unknown codec");
}
writer = MakeUnique<WebMWriter>(aTrackTypes);
}
#endif // MOZ_WEBM_ENCODER
else if (MediaDecoder::IsOggEnabled() && MediaDecoder::IsOpusEnabled() &&
aMIMEType.EqualsLiteral(AUDIO_OGG) &&
aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK) {
writer = MakeUnique<OggWriter>();
audioEncoder = MakeAndAddRef<OpusTrackEncoder>(aTrackRate);
mimeType = NS_LITERAL_STRING(AUDIO_OGG);
}
if (mimeType->Type() == MEDIAMIMETYPE(VIDEO_WEBM) ||
mimeType->Type() == MEDIAMIMETYPE(AUDIO_WEBM)) {
#ifdef MOZ_WEBM_ENCODER
else if (MediaEncoder::IsWebMEncoderEnabled() &&
(aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK ||
!MediaDecoder::IsOggEnabled())) {
if (aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK &&
MediaDecoder::IsOpusEnabled()) {
audioEncoder = MakeAndAddRef<OpusTrackEncoder>(aTrackRate);
}
if (aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK) {
if (Preferences::GetBool("media.recorder.video.frame_drops", true)) {
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::ALLOW);
} else {
videoEncoder = MakeAndAddRef<VP8TrackEncoder>(
driftCompensator, aTrackRate, FrameDroppingMode::DISALLOW);
}
}
writer = MakeUnique<WebMWriter>(aTrackTypes);
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
}
MOZ_ASSERT_IF(mimeType->Type() == MEDIAMIMETYPE(AUDIO_WEBM), !videoEncoder);
writer = MakeUnique<WebMWriter>();
#else
MOZ_CRASH("Webm cannot be selected if not supported");
#endif // MOZ_WEBM_ENCODER
else if (MediaDecoder::IsOggEnabled() && MediaDecoder::IsOpusEnabled() &&
aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK) {
} else if (mimeType->Type() == MEDIAMIMETYPE(AUDIO_OGG)) {
MOZ_ASSERT(audioEncoder);
MOZ_ASSERT(!videoEncoder);
writer = MakeUnique<OggWriter>();
audioEncoder = MakeAndAddRef<OpusTrackEncoder>(aTrackRate);
mimeType = NS_LITERAL_STRING(AUDIO_OGG);
} else {
LOG(LogLevel::Error,
("Can not find any encoder to record this media stream"));
return nullptr;
}
NS_ENSURE_TRUE(writer, nullptr);
LOG(LogLevel::Info,
("Create encoder result:a[%p](%u bps) v[%p](%u bps) w[%p] mimeType = "
"%s.",
audioEncoder.get(), aAudioBitrate, videoEncoder.get(), aVideoBitrate,
writer.get(), NS_ConvertUTF16toUTF8(mimeType).get()));
writer.get(), NS_ConvertUTF16toUTF8(aMIMEType).get()));
if (audioEncoder) {
audioEncoder->SetWorkerThread(aEncoderThread);
@ -735,77 +666,7 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
}
return MakeAndAddRef<MediaEncoder>(
aEncoderThread, std::move(driftCompensator), std::move(writer),
audioEncoder, videoEncoder, aTrackRate, mimeType);
}
nsresult MediaEncoder::GetEncodedMetadata(
nsTArray<nsTArray<uint8_t>>* aOutputBufs, nsAString& aMIMEType) {
AUTO_PROFILER_LABEL("MediaEncoder::GetEncodedMetadata", OTHER);
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
if (mShutdown) {
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
if (!mInitialized) {
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
if (mMetadataEncoded) {
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
aMIMEType = mMIMEType;
LOG(LogLevel::Verbose,
("GetEncodedMetadata TimeStamp = %f", GetEncodeTimeStamp()));
nsresult rv;
if (mAudioEncoder) {
if (!mAudioEncoder->IsInitialized()) {
LOG(LogLevel::Error,
("GetEncodedMetadata Audio encoder not initialized"));
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
rv = CopyMetadataToMuxer(mAudioEncoder);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, ("Failed to Set Audio Metadata"));
SetError();
return rv;
}
}
if (mVideoEncoder) {
if (!mVideoEncoder->IsInitialized()) {
LOG(LogLevel::Error,
("GetEncodedMetadata Video encoder not initialized"));
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
rv = CopyMetadataToMuxer(mVideoEncoder.get());
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, ("Failed to Set Video Metadata"));
SetError();
return rv;
}
}
rv = mWriter->GetContainerData(aOutputBufs, ContainerWriter::GET_HEADER);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, ("Writer fail to generate header!"));
SetError();
return rv;
}
LOG(LogLevel::Verbose,
("Finish GetEncodedMetadata TimeStamp = %f", GetEncodeTimeStamp()));
mMetadataEncoded = true;
return NS_OK;
audioEncoder, videoEncoder, aTrackRate, aMIMEType);
}
nsresult MediaEncoder::GetEncodedData(
@ -813,47 +674,73 @@ nsresult MediaEncoder::GetEncodedData(
AUTO_PROFILER_LABEL("MediaEncoder::GetEncodedData", OTHER);
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
if (!mMetadataEncoded) {
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
MOZ_ASSERT(mInitialized);
MOZ_ASSERT_IF(mAudioEncoder, mAudioEncoder->IsInitialized());
MOZ_ASSERT_IF(mVideoEncoder, mVideoEncoder->IsInitialized());
nsresult rv;
LOG(LogLevel::Verbose,
("GetEncodedData TimeStamp = %f", GetEncodeTimeStamp()));
EncodedFrameContainer encodedData;
if (mVideoEncoder) {
// We're most likely to actually wait for a video frame, so do that first
// to minimize capture offset/lipsync issues.
rv = WriteEncodedDataToMuxer(mVideoEncoder);
LOG(LogLevel::Verbose,
("Video encoded TimeStamp = %f", GetEncodeTimeStamp()));
if (mMuxer->NeedsMetadata()) {
nsTArray<RefPtr<TrackMetadataBase>> meta;
if (mAudioEncoder && !*meta.AppendElement(mAudioEncoder->GetMetadata())) {
LOG(LogLevel::Error, ("Audio metadata is null"));
SetError();
return NS_ERROR_ABORT;
}
if (mVideoEncoder && !*meta.AppendElement(mVideoEncoder->GetMetadata())) {
LOG(LogLevel::Error, ("Video metadata is null"));
SetError();
return NS_ERROR_ABORT;
}
rv = mMuxer->SetMetadata(meta);
if (NS_FAILED(rv)) {
LOG(LogLevel::Warning, ("Failed to write encoded video data to muxer"));
LOG(LogLevel::Error, ("SetMetadata failed"));
SetError();
return rv;
}
}
if (mAudioEncoder) {
rv = WriteEncodedDataToMuxer(mAudioEncoder);
LOG(LogLevel::Verbose,
("Audio encoded TimeStamp = %f", GetEncodeTimeStamp()));
// First, feed encoded data from encoders to muxer.
if (mVideoEncoder && !mVideoEncoder->IsEncodingComplete()) {
nsTArray<RefPtr<EncodedFrame>> videoFrames;
rv = mVideoEncoder->GetEncodedTrack(videoFrames);
if (NS_FAILED(rv)) {
LOG(LogLevel::Warning, ("Failed to write encoded audio data to muxer"));
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from video encoder."));
return rv;
}
for (const RefPtr<EncodedFrame>& frame : videoFrames) {
mMuxer->AddEncodedVideoFrame(frame);
}
if (mVideoEncoder->IsEncodingComplete()) {
mMuxer->VideoEndOfStream();
}
}
// In the audio-only or video-only case, leave the unavailable track's flag
// set to true.
bool isAudioCompleted = !mAudioEncoder || mAudioEncoder->IsEncodingComplete();
bool isVideoCompleted = !mVideoEncoder || mVideoEncoder->IsEncodingComplete();
rv = mWriter->GetContainerData(
aOutputBufs,
isAudioCompleted && isVideoCompleted ? ContainerWriter::FLUSH_NEEDED : 0);
if (mWriter->IsWritingComplete()) {
if (mAudioEncoder && !mAudioEncoder->IsEncodingComplete()) {
nsTArray<RefPtr<EncodedFrame>> audioFrames;
rv = mAudioEncoder->GetEncodedTrack(audioFrames);
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from audio encoder."));
return rv;
}
for (const RefPtr<EncodedFrame>& frame : audioFrames) {
mMuxer->AddEncodedAudioFrame(frame);
}
if (mAudioEncoder->IsEncodingComplete()) {
mMuxer->AudioEndOfStream();
}
}
// Second, get data from muxer. This will do the actual muxing.
rv = mMuxer->GetData(aOutputBufs);
if (mMuxer->IsFinished()) {
mCompleted = true;
Shutdown();
}
@ -861,38 +748,28 @@ nsresult MediaEncoder::GetEncodedData(
LOG(LogLevel::Verbose,
("END GetEncodedData TimeStamp=%f "
"mCompleted=%d, aComplete=%d, vComplete=%d",
GetEncodeTimeStamp(), mCompleted, isAudioCompleted, isVideoCompleted));
GetEncodeTimeStamp(), mCompleted,
!mAudioEncoder || mAudioEncoder->IsEncodingComplete(),
!mVideoEncoder || mVideoEncoder->IsEncodingComplete()));
return rv;
}
void MediaEncoder::Shutdown() {
RefPtr<GenericNonExclusivePromise::AllPromiseType> MediaEncoder::Shutdown() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
if (mShutdown) {
return;
if (mShutdownPromise) {
return mShutdownPromise;
}
mShutdown = true;
LOG(LogLevel::Info, ("MediaEncoder has been shut down."));
LOG(LogLevel::Info, ("MediaEncoder is shutting down."));
if (mAudioEncoder) {
mAudioEncoder->UnregisterListener(mEncoderListener);
}
if (mAudioListener) {
mAudioListener->NotifyShutdown();
}
if (mVideoEncoder) {
mVideoEncoder->UnregisterListener(mEncoderListener);
}
if (mVideoListener) {
mVideoListener->NotifyShutdown();
}
mEncoderListener->Forget();
if (mCanceled) {
// Shutting down after being canceled. We cannot use the encoder thread.
return;
}
auto listeners(mListeners);
for (auto& l : listeners) {
// We dispatch here since this method is typically called from
@ -903,89 +780,34 @@ void MediaEncoder::Shutdown() {
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
}
AutoTArray<RefPtr<GenericNonExclusivePromise>, 2> shutdownPromises;
if (mAudioListener) {
shutdownPromises.AppendElement(mAudioListener->OnShutdown());
}
if (mVideoListener) {
shutdownPromises.AppendElement(mVideoListener->OnShutdown());
}
return mShutdownPromise =
GenericNonExclusivePromise::All(mEncoderThread, shutdownPromises);
}
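// Illustrative sketch, not part of this patch: Shutdown() now returns a
// MozPromise that resolves once both track listeners have completed their
// asynchronous shutdown. A hypothetical caller could chain on it like so
// (using the no-argument lambda overloads of MozPromise::Then):
//
//   encoder->Cancel()->Then(
//       GetMainThreadSerialEventTarget(), __func__,
//       []() { /* both listeners have shut down */ },
//       []() { /* at least one shutdown promise was rejected */ });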
nsresult MediaEncoder::WriteEncodedDataToMuxer(TrackEncoder* aTrackEncoder) {
AUTO_PROFILER_LABEL("MediaEncoder::WriteEncodedDataToMuxer", OTHER);
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
if (!aTrackEncoder) {
NS_ERROR("No track encoder to get data from");
return NS_ERROR_FAILURE;
}
if (aTrackEncoder->IsEncodingComplete()) {
return NS_OK;
}
EncodedFrameContainer encodedData;
nsresult rv = aTrackEncoder->GetEncodedTrack(encodedData);
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from encoder."));
SetError();
return rv;
}
rv = mWriter->WriteEncodedTrack(
encodedData,
aTrackEncoder->IsEncodingComplete() ? ContainerWriter::END_OF_STREAM : 0);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error,
("Failed to write encoded track to the media container."));
SetError();
}
return rv;
}
nsresult MediaEncoder::CopyMetadataToMuxer(TrackEncoder* aTrackEncoder) {
AUTO_PROFILER_LABEL("MediaEncoder::CopyMetadataToMuxer", OTHER);
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
if (!aTrackEncoder) {
NS_ERROR("No track encoder to get metadata from");
return NS_ERROR_FAILURE;
}
RefPtr<TrackMetadataBase> meta = aTrackEncoder->GetMetadata();
if (meta == nullptr) {
LOG(LogLevel::Error, ("metadata == null"));
SetError();
return NS_ERROR_ABORT;
}
nsresult rv = mWriter->SetMetadata(meta);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, ("SetMetadata failed"));
SetError();
}
return rv;
}
bool MediaEncoder::IsShutdown() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
return mShutdown;
}
void MediaEncoder::Cancel() {
RefPtr<GenericNonExclusivePromise::AllPromiseType> MediaEncoder::Cancel() {
MOZ_ASSERT(NS_IsMainThread());
RefPtr<MediaEncoder> self = this;
nsresult rv = mEncoderThread->Dispatch(NewRunnableFrom([self]() mutable {
self->mCanceled = true;
Stop();
if (self->mAudioEncoder) {
self->mAudioEncoder->Cancel();
}
if (self->mVideoEncoder) {
self->mVideoEncoder->Cancel();
}
self->Shutdown();
return NS_OK;
}));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
return InvokeAsync(mEncoderThread, __func__,
[self = RefPtr<MediaEncoder>(this), this]() {
if (mAudioEncoder) {
mAudioEncoder->Cancel();
}
if (mVideoEncoder) {
mVideoEncoder->Cancel();
}
return Shutdown();
});
}
bool MediaEncoder::HasError() {
@ -1033,11 +855,18 @@ void MediaEncoder::Stop() {
}
}
#ifdef MOZ_WEBM_ENCODER
bool MediaEncoder::IsWebMEncoderEnabled() {
#ifdef MOZ_WEBM_ENCODER
return StaticPrefs::media_encoder_webm_enabled();
}
#else
return false;
#endif
}
const nsString& MediaEncoder::MimeType() const {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
return mMIMEType;
}
void MediaEncoder::NotifyInitialized() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
@ -1105,13 +934,13 @@ size_t MediaEncoder::SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) {
return size;
}
void MediaEncoder::SetVideoKeyFrameInterval(int32_t aVideoKeyFrameInterval) {
void MediaEncoder::SetVideoKeyFrameInterval(uint32_t aVideoKeyFrameInterval) {
if (!mVideoEncoder) {
return;
}
MOZ_ASSERT(mEncoderThread);
nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<int32_t>(
nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<uint32_t>(
"mozilla::VideoTrackEncoder::SetKeyFrameInterval", mVideoEncoder,
&VideoTrackEncoder::SetKeyFrameInterval, aVideoKeyFrameInterval));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
@ -1119,3 +948,5 @@ void MediaEncoder::SetVideoKeyFrameInterval(int32_t aVideoKeyFrameInterval) {
}
} // namespace mozilla
#undef LOG


@ -7,6 +7,7 @@
#include "ContainerWriter.h"
#include "CubebUtils.h"
#include "MediaQueue.h"
#include "MediaTrackGraph.h"
#include "MediaTrackListener.h"
#include "mozilla/DebugOnly.h"
@ -18,6 +19,7 @@
namespace mozilla {
class DriftCompensator;
class Muxer;
class Runnable;
class TaskQueue;
@ -75,29 +77,21 @@ class MediaEncoderListener {
* been initialized and when there's data available.
* => encoder->RegisterListener(listener);
*
* 3) Connect the MediaStreamTracks to be recorded.
* => encoder->ConnectMediaStreamTrack(track);
* This creates the corresponding TrackEncoder and connects the track and
* the TrackEncoder through a track listener. This also starts encoding.
*
* 4) When the MediaEncoderListener is notified that the MediaEncoder is
* initialized, we can encode metadata.
* => encoder->GetEncodedMetadata(...);
*
* 5) When the MediaEncoderListener is notified that the MediaEncoder has
* data available, we can encode data.
* 3) When the MediaEncoderListener is notified that the MediaEncoder has
* data available, we can encode data. This also encodes metadata on its
* first invocation.
* => encoder->GetEncodedData(...);
*
* 6) To stop encoding, there are multiple options:
* 4) To stop encoding, there are multiple options:
*
* 6.1) Stop() for a graceful stop.
* 4.1) Stop() for a graceful stop.
* => encoder->Stop();
*
* 6.2) Cancel() for an immediate stop, if you don't need the data currently
* 4.2) Cancel() for an immediate stop, if you don't need the data currently
* buffered.
* => encoder->Cancel();
*
* 6.3) When all input tracks end, the MediaEncoder will automatically stop
* 4.3) When all input tracks end, the MediaEncoder will automatically stop
* and shut down.
*/
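// Illustrative sketch, not part of this patch - the documented lifecycle in
// code, assuming `encoder` came from CreateEncoder() and `listener`
// implements MediaEncoderListener:
//
//   encoder->RegisterListener(listener);
//   // When the listener reports data available, pull it; the first call
//   // also encodes the metadata (container header).
//   nsTArray<nsTArray<uint8_t>> buffers;
//   nsresult rv = encoder->GetEncodedData(&buffers);
//   // Stop gracefully when done, or Cancel() to discard buffered data.
//   if (NS_SUCCEEDED(rv)) {
//     encoder->Stop();
//   } else {
//     encoder->Cancel();
//   }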
class MediaEncoder {
@ -156,44 +150,31 @@ class MediaEncoder {
uint32_t aAudioBitrate, uint32_t aVideoBitrate, uint8_t aTrackTypes,
TrackRate aTrackRate);
/**
* Encodes raw metadata for all tracks to aOutputBufs. aMIMEType is the valid
* mime-type for the returned container data. The buffer of container data is
* allocated in ContainerWriter::GetContainerData().
*
* Should there be insufficient input data for either track encoder to infer
* the metadata, or if metadata has already been encoded, we return an error
* and the output arguments are undefined. Otherwise we return NS_OK.
*/
nsresult GetEncodedMetadata(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsAString& aMIMEType);
/**
* Encodes raw data for all tracks to aOutputBufs. The buffer of container
* data is allocated in ContainerWriter::GetContainerData().
*
* This implies that metadata has already been encoded and that all track
* encoders are still active. Should either implication break, we return an
* error and the output argument is undefined. Otherwise we return NS_OK.
* On its first call, metadata is also encoded. TrackEncoders must have been
* initialized before this is called.
*/
nsresult GetEncodedData(nsTArray<nsTArray<uint8_t>>* aOutputBufs);
/**
* Return true if MediaEncoder has been shutdown. Reasons are encoding
* Asserts that Shutdown() has been called. Reasons are encoding
* complete, encountering an error, or being canceled by its caller.
*/
bool IsShutdown();
void AssertShutdownCalled() { MOZ_ASSERT(mShutdownPromise); }
/**
* Cancels the encoding and shuts down the encoder using Shutdown().
* Listeners are not notified of the shutdown.
*/
void Cancel();
RefPtr<GenericNonExclusivePromise::AllPromiseType> Cancel();
bool HasError();
#ifdef MOZ_WEBM_ENCODER
static bool IsWebMEncoderEnabled();
#endif
const nsString& MimeType() const;
/**
* Notifies listeners that this MediaEncoder has been initialized.
@ -228,7 +209,7 @@ class MediaEncoder {
/**
* Set desired video keyframe interval defined in milliseconds.
*/
void SetVideoKeyFrameInterval(int32_t aVideoKeyFrameInterval);
void SetVideoKeyFrameInterval(uint32_t aVideoKeyFrameInterval);
protected:
~MediaEncoder();
@ -250,7 +231,7 @@ class MediaEncoder {
* Shuts down the MediaEncoder and cleans up track encoders.
* Listeners will be notified of the shutdown unless we were Cancel()ed first.
*/
void Shutdown();
RefPtr<GenericNonExclusivePromise::AllPromiseType> Shutdown();
/**
* Sets mError to true, notifies listeners of the error if mError changed,
@ -258,15 +239,10 @@ class MediaEncoder {
*/
void SetError();
// Get encoded data from trackEncoder and write to muxer
nsresult WriteEncodedDataToMuxer(TrackEncoder* aTrackEncoder);
// Get metadata from trackEncoder and copy to muxer
nsresult CopyMetadataToMuxer(TrackEncoder* aTrackEncoder);
const RefPtr<TaskQueue> mEncoderThread;
const RefPtr<DriftCompensator> mDriftCompensator;
UniquePtr<ContainerWriter> mWriter;
UniquePtr<Muxer> mMuxer;
RefPtr<AudioTrackEncoder> mAudioEncoder;
RefPtr<AudioTrackListener> mAudioListener;
RefPtr<VideoTrackEncoder> mVideoEncoder;
@ -294,13 +270,12 @@ class MediaEncoder {
RefPtr<SharedDummyTrack> mGraphTrack;
TimeStamp mStartTime;
nsString mMIMEType;
const nsString mMIMEType;
bool mInitialized;
bool mMetadataEncoded;
bool mCompleted;
bool mError;
bool mCanceled;
bool mShutdown;
// Set when shutdown starts.
RefPtr<GenericNonExclusivePromise::AllPromiseType> mShutdownPromise;
// Get duration from create encoder, for logging purpose
double GetEncodeTimeStamp() {
TimeDuration decodeTime;

dom/media/encoder/Muxer.cpp (new file)

@ -0,0 +1,228 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Muxer.h"
#include "ContainerWriter.h"
namespace mozilla {
LazyLogModule gMuxerLog("Muxer");
#define LOG(type, ...) MOZ_LOG(gMuxerLog, type, (__VA_ARGS__))
Muxer::Muxer(UniquePtr<ContainerWriter> aWriter)
: mWriter(std::move(aWriter)) {}
bool Muxer::IsFinished() { return mWriter->IsWritingComplete(); }
nsresult Muxer::SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) {
nsresult rv = mWriter->SetMetadata(aMetadata);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "%p Setting metadata failed, tracks=%zu", this,
aMetadata.Length());
return rv;
}
for (const auto& track : aMetadata) {
switch (track->GetKind()) {
case TrackMetadataBase::METADATA_OPUS: {
// In the case of Opus we need to calculate the codec delay based on the
// pre-skip. For more information see:
// https://tools.ietf.org/html/rfc7845#section-4.2
// Calculate offset in microseconds
OpusMetadata* opusMeta = static_cast<OpusMetadata*>(track.get());
mAudioCodecDelay = static_cast<uint64_t>(
LittleEndian::readUint16(opusMeta->mIdHeader.Elements() + 10) *
PR_USEC_PER_SEC / 48000);
[[fallthrough]];
}
case TrackMetadataBase::METADATA_VORBIS:
case TrackMetadataBase::METADATA_AAC:
case TrackMetadataBase::METADATA_AMR:
case TrackMetadataBase::METADATA_EVRC:
MOZ_ASSERT(!mHasAudio, "Only one audio track supported");
mHasAudio = true;
break;
case TrackMetadataBase::METADATA_VP8:
MOZ_ASSERT(!mHasVideo, "Only one video track supported");
mHasVideo = true;
break;
default:
MOZ_CRASH("Unknown codec metadata");
};
}
mMetadataSet = true;
MOZ_ASSERT(mHasAudio || mHasVideo);
if (!mHasAudio) {
mEncodedAudioFrames.Finish();
MOZ_ASSERT(mEncodedAudioFrames.AtEndOfStream());
}
if (!mHasVideo) {
mEncodedVideoFrames.Finish();
MOZ_ASSERT(mEncodedVideoFrames.AtEndOfStream());
}
LOG(LogLevel::Info, "%p Metadata set; audio=%d, video=%d", this, mHasAudio,
mHasVideo);
return rv;
}
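// Illustrative sketch, not part of this patch: the pre-skip conversion
// above, as a hypothetical standalone helper. The Opus ID header stores a
// 16-bit little-endian pre-skip, counted in 48 kHz samples, at byte offset
// 10 (RFC 7845, sections 4.2 and 5.1). The common pre-skip of 312 samples
// works out to 312 * PR_USEC_PER_SEC / 48000 = 6500 microseconds of codec
// delay, later added to every Opus frame's timestamp.
//
//   static uint64_t PreSkipToUsecs(const nsTArray<uint8_t>& aIdHeader) {
//     uint16_t preSkipSamples =
//         LittleEndian::readUint16(aIdHeader.Elements() + 10);
//     return static_cast<uint64_t>(preSkipSamples) * PR_USEC_PER_SEC / 48000;
//   }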
void Muxer::AddEncodedAudioFrame(EncodedFrame* aFrame) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio);
if (aFrame->mFrameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME) {
aFrame->mTime += mAudioCodecDelay;
}
mEncodedAudioFrames.Push(aFrame);
LOG(LogLevel::Verbose,
"%p Added audio frame of type %u, [start %" PRIu64 ", end %" PRIu64 ")",
this, aFrame->mFrameType, aFrame->mTime,
aFrame->mTime + aFrame->mDuration);
}
void Muxer::AddEncodedVideoFrame(EncodedFrame* aFrame) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasVideo);
mEncodedVideoFrames.Push(aFrame);
LOG(LogLevel::Verbose,
"%p Added video frame of type %u, [start %" PRIu64 ", end %" PRIu64 ")",
this, aFrame->mFrameType, aFrame->mTime,
aFrame->mTime + aFrame->mDuration);
}
void Muxer::AudioEndOfStream() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio);
LOG(LogLevel::Info, "%p Reached audio EOS", this);
mEncodedAudioFrames.Finish();
}
void Muxer::VideoEndOfStream() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasVideo);
LOG(LogLevel::Info, "%p Reached video EOS", this);
mEncodedVideoFrames.Finish();
}
nsresult Muxer::GetData(nsTArray<nsTArray<uint8_t>>* aOutputBuffers) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio || mHasVideo);
nsresult rv;
if (!mMetadataEncoded) {
rv = mWriter->GetContainerData(aOutputBuffers, ContainerWriter::GET_HEADER);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "%p Failed getting metadata from writer", this);
return rv;
}
mMetadataEncoded = true;
}
if (mEncodedAudioFrames.GetSize() == 0 && !mEncodedAudioFrames.IsFinished() &&
mEncodedVideoFrames.GetSize() == 0 && !mEncodedVideoFrames.IsFinished()) {
// Nothing to mux.
return NS_OK;
}
rv = Mux();
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "%p Failed muxing data into writer", this);
return rv;
}
MOZ_ASSERT_IF(
mEncodedAudioFrames.IsFinished() && mEncodedVideoFrames.IsFinished(),
mEncodedAudioFrames.AtEndOfStream());
MOZ_ASSERT_IF(
mEncodedAudioFrames.IsFinished() && mEncodedVideoFrames.IsFinished(),
mEncodedVideoFrames.AtEndOfStream());
uint32_t flags =
mEncodedAudioFrames.AtEndOfStream() && mEncodedVideoFrames.AtEndOfStream()
? ContainerWriter::FLUSH_NEEDED
: 0;
if (mEncodedAudioFrames.AtEndOfStream() &&
mEncodedVideoFrames.AtEndOfStream()) {
LOG(LogLevel::Info, "%p All data written", this);
}
return mWriter->GetContainerData(aOutputBuffers, flags);
}
nsresult Muxer::Mux() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio || mHasVideo);
nsTArray<RefPtr<EncodedFrame>> frames;
// The times at which we expect our next video and audio frames. These are
// based on the time + duration (GetEndTime()) of the last seen frames.
// Assumes that the encoders write the correct duration for frames.
uint64_t expectedNextVideoTime = 0;
uint64_t expectedNextAudioTime = 0;
// Interleave frames until we're out of audio or video
while (mEncodedVideoFrames.GetSize() > 0 &&
mEncodedAudioFrames.GetSize() > 0) {
RefPtr<EncodedFrame> videoFrame = mEncodedVideoFrames.PeekFront();
RefPtr<EncodedFrame> audioFrame = mEncodedAudioFrames.PeekFront();
// Each track's next frame must occur at or after that track's expected time.
MOZ_ASSERT(videoFrame->mTime >= expectedNextVideoTime);
MOZ_ASSERT(audioFrame->mTime >= expectedNextAudioTime);
if (videoFrame->mTime <= audioFrame->mTime) {
expectedNextVideoTime = videoFrame->GetEndTime();
RefPtr<EncodedFrame> frame = mEncodedVideoFrames.PopFront();
frames.AppendElement(frame);
} else {
expectedNextAudioTime = audioFrame->GetEndTime();
RefPtr<EncodedFrame> frame = mEncodedAudioFrames.PopFront();
frames.AppendElement(frame);
}
}
// If we're out of audio we still may be able to add more video...
if (mEncodedAudioFrames.GetSize() == 0) {
while (mEncodedVideoFrames.GetSize() > 0) {
if (!mEncodedAudioFrames.AtEndOfStream() &&
mEncodedVideoFrames.PeekFront()->mTime > expectedNextAudioTime) {
// Audio encoding is not complete and since the video frame comes
// after our next audio frame we cannot safely add it.
break;
}
frames.AppendElement(mEncodedVideoFrames.PopFront());
}
}
// If we're out of video we still may be able to add more audio...
if (mEncodedVideoFrames.GetSize() == 0) {
while (mEncodedAudioFrames.GetSize() > 0) {
if (!mEncodedVideoFrames.AtEndOfStream() &&
mEncodedAudioFrames.PeekFront()->mTime > expectedNextVideoTime) {
// Video encoding is not complete and since the audio frame comes
// after our next video frame we cannot safely add it.
break;
}
frames.AppendElement(mEncodedAudioFrames.PopFront());
}
}
LOG(LogLevel::Debug,
"%p Muxed data, remaining-audio=%zu, remaining-video=%zu", this,
mEncodedAudioFrames.GetSize(), mEncodedVideoFrames.GetSize());
// If encoding is complete for both encoders we should signal end of stream,
// otherwise we keep going.
uint32_t flags =
mEncodedVideoFrames.AtEndOfStream() && mEncodedAudioFrames.AtEndOfStream()
? ContainerWriter::END_OF_STREAM
: 0;
nsresult rv = mWriter->WriteEncodedTrack(frames, flags);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "Error! Failed to write muxed data to the container");
}
return rv;
}
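// Worked trace of the interleaving rule above, with hypothetical frame
// times in microseconds (not part of this patch):
//   audio queue: a0 [0,48), a48 [48,96)   - then marked finished
//   video queue: v0 [0,50), v50 [50,100)  - then marked finished
// The loop pops v0 first (0 <= 0), then a0 (50 > 0), then a48 (50 > 48).
// The audio queue is then empty and at end of stream, so the video drain
// can safely emit v50. Final write order: v0, a0, a48, v50 - the order the
// MuxerTest.AudioVideoOutOfOrder gtest below expects.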
} // namespace mozilla
#undef LOG

dom/media/encoder/Muxer.h (new file)

@ -0,0 +1,74 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_ENCODER_MUXER_H_
#define DOM_MEDIA_ENCODER_MUXER_H_
#include "MediaQueue.h"
namespace mozilla {
class ContainerWriter;
// Generic Muxer class that helps pace the output from track encoders to the
// ContainerWriter, so time never appears to go backwards.
// Note that the entire class is written for single threaded access.
class Muxer {
public:
explicit Muxer(UniquePtr<ContainerWriter> aWriter);
~Muxer() = default;
// Returns true when all tracks have ended, and all data has been muxed and
// fetched.
bool IsFinished();
// Returns true if this muxer has not been given metadata yet.
bool NeedsMetadata() const { return !mMetadataSet; }
// Sets metadata for all tracks. This may only be called once.
nsresult SetMetadata(const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata);
// Adds an encoded audio frame for muxing
void AddEncodedAudioFrame(EncodedFrame* aFrame);
// Adds an encoded video frame for muxing
void AddEncodedVideoFrame(EncodedFrame* aFrame);
// Marks the audio track as ended. Once all tracks for which we have metadata
// have ended, GetData() will drain and the muxer will be marked as finished.
void AudioEndOfStream();
// Marks the video track as ended. Once all tracks for which we have metadata
// have ended, GetData() will drain and the muxer will be marked as finished.
void VideoEndOfStream();
// Gets the data that has been muxed and written into the container so far.
nsresult GetData(nsTArray<nsTArray<uint8_t>>* aOutputBuffers);
private:
// Writes data in MediaQueues to the ContainerWriter.
nsresult Mux();
// Audio frames that have been encoded and are pending muxing into the writer.
MediaQueue<EncodedFrame> mEncodedAudioFrames;
// Video frames that have been encoded and are pending muxing into the writer.
MediaQueue<EncodedFrame> mEncodedVideoFrames;
// The writer for the specific container we're recording into.
UniquePtr<ContainerWriter> mWriter;
// How much each audio time stamp should be delayed in microseconds. Used to
// adjust for opus codec delay.
uint64_t mAudioCodecDelay = 0;
// True once metadata has been set in the muxer.
bool mMetadataSet = false;
// True once metadata has been written to file.
bool mMetadataEncoded = false;
// True if metadata is set and contains an audio track.
bool mHasAudio = false;
// True if metadata is set and contains a video track.
bool mHasVideo = false;
};
} // namespace mozilla
#endif
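// Illustrative sketch, not part of this patch - minimal audio-only use of
// the API above, with hypothetical `writer`, `audioMetadata` and `frame`
// variables:
//
//   Muxer muxer(std::move(writer));             // any ContainerWriter impl
//   nsTArray<RefPtr<TrackMetadataBase>> metadata;
//   metadata.AppendElement(audioMetadata);
//   nsresult rv = muxer.SetMetadata(metadata);  // only one call allowed
//   muxer.AddEncodedAudioFrame(frame);          // queue frames as they arrive
//   muxer.AudioEndOfStream();                   // no more audio is coming
//   nsTArray<nsTArray<uint8_t>> buffers;
//   rv = muxer.GetData(&buffers);               // header, then muxed payload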


@ -9,7 +9,6 @@
#include <opus/opus.h>
#undef LOG
#define LOG(args, ...)
namespace mozilla {
@ -227,7 +226,8 @@ already_AddRefed<TrackMetadataBase> OpusTrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
nsresult OpusTrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
AUTO_PROFILER_LABEL("OpusTrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -324,7 +324,7 @@ nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
MOZ_ASSERT(frameCopied <= 3844, "frameCopied exceeded expected range");
RefPtr<EncodedFrame> audiodata = new EncodedFrame();
audiodata->SetFrameType(EncodedFrame::OPUS_AUDIO_FRAME);
audiodata->mFrameType = EncodedFrame::OPUS_AUDIO_FRAME;
int framesInPCM = frameCopied;
if (mResampler) {
AutoTArray<AudioDataValue, 9600> resamplingDest;
@ -366,10 +366,10 @@ nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
mResampledLeftover.Length());
// This is always at 48000Hz.
framesInPCM = framesLeft + outframesToCopy;
audiodata->SetDuration(framesInPCM);
audiodata->mDuration = framesInPCM;
} else {
// The ogg time stamping and pre-skip is always timed at 48000.
audiodata->SetDuration(frameCopied * (kOpusSamplingRate / mSamplingRate));
audiodata->mDuration = frameCopied * (kOpusSamplingRate / mSamplingRate);
}
// Remove the raw data which has been pulled to pcm buffer.
@ -421,14 +421,16 @@ nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
audiodata->SwapInFrameData(frameData);
// timestamp should be the time of the first sample
audiodata->SetTimeStamp(mOutputTimeStamp);
audiodata->mTime = mOutputTimeStamp;
mOutputTimeStamp +=
FramesToUsecs(GetPacketDuration(), kOpusSamplingRate).value();
LOG("[Opus] mOutputTimeStamp %lld.", mOutputTimeStamp);
aData.AppendEncodedFrame(audiodata);
aData.AppendElement(audiodata);
}
return result >= 0 ? NS_OK : NS_ERROR_FAILURE;
}
} // namespace mozilla
#undef LOG


@ -32,7 +32,7 @@ class OpusTrackEncoder : public AudioTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() override;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) override;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) override;
protected:
int GetPacketDuration() override;


@ -28,7 +28,7 @@ static const int AUDIO_INIT_FAILED_DURATION = 1;
static const int VIDEO_INIT_FAILED_DURATION = 30;
// The maximal keyframe interval that can be set.
// Longer values will be shortened to this value.
static const int DEFAULT_KEYFRAME_INTERVAL_MS = 1000;
static const unsigned int DEFAULT_KEYFRAME_INTERVAL_MS = 1000;
TrackEncoder::TrackEncoder(TrackRate aTrackRate)
: mEncodingComplete(false),
@ -752,9 +752,15 @@ size_t VideoTrackEncoder::SizeOfExcludingThis(
mOutgoingBuffer.SizeOfExcludingThis(aMallocSizeOf);
}
void VideoTrackEncoder::SetKeyFrameInterval(int32_t aKeyFrameInterval) {
void VideoTrackEncoder::SetKeyFrameInterval(uint32_t aKeyFrameInterval) {
MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
if (aKeyFrameInterval == 0) {
mKeyFrameInterval = DEFAULT_KEYFRAME_INTERVAL_MS;
return;
}
mKeyFrameInterval = std::min(aKeyFrameInterval, DEFAULT_KEYFRAME_INTERVAL_MS);
}
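// The clamping above, spelled out (DEFAULT_KEYFRAME_INTERVAL_MS is 1000):
//   SetKeyFrameInterval(0)    -> mKeyFrameInterval = 1000  (default)
//   SetKeyFrameInterval(500)  -> mKeyFrameInterval = 500   (kept as-is)
//   SetKeyFrameInterval(5000) -> mKeyFrameInterval = 1000  (clamped down)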
} // namespace mozilla
#undef TRACK_LOG


@ -6,7 +6,7 @@
#define TrackEncoder_h_
#include "AudioSegment.h"
#include "EncodedFrameContainer.h"
#include "EncodedFrame.h"
#include "MediaTrackGraph.h"
#include "TrackMetadataBase.h"
#include "VideoSegment.h"
@ -80,7 +80,7 @@ class TrackEncoder {
* Encodes raw segments. Result data is returned in aData, and called on the
* worker thread.
*/
virtual nsresult GetEncodedTrack(EncodedFrameContainer& aData) = 0;
virtual nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) = 0;
/**
* Returns true once this TrackEncoder is initialized.
@ -419,7 +419,7 @@ class VideoTrackEncoder : public TrackEncoder {
/**
* Set desired keyframe interval defined in milliseconds.
*/
void SetKeyFrameInterval(int32_t aKeyFrameInterval);
void SetKeyFrameInterval(uint32_t aKeyFrameInterval);
protected:
/**
@ -517,7 +517,7 @@ class VideoTrackEncoder : public TrackEncoder {
/**
* The desired keyframe interval defined in milliseconds.
*/
int32_t mKeyFrameInterval;
uint32_t mKeyFrameInterval;
/**
* True if the video MediaTrackTrack this VideoTrackEncoder is attached to is


@ -219,7 +219,8 @@ already_AddRefed<TrackMetadataBase> VP8TrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
nsresult VP8TrackEncoder::GetEncodedPartitions(
nsTArray<RefPtr<EncodedFrame>>& aData) {
vpx_codec_iter_t iter = nullptr;
EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
nsTArray<uint8_t> frameData;
@ -248,7 +249,7 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
if (!frameData.IsEmpty()) {
// Copy the encoded data to aData.
EncodedFrame* videoData = new EncodedFrame();
videoData->SetFrameType(frameType);
videoData->mFrameType = frameType;
// Convert the timestamp and duration to Usecs.
CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
@ -256,7 +257,7 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
NS_ERROR("Microsecond timestamp overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
videoData->SetTimeStamp((uint64_t)timestamp.value());
videoData->mTime = (uint64_t)timestamp.value();
mExtractedDuration += pkt->data.frame.duration;
if (!mExtractedDuration.isValid()) {
@ -278,14 +279,13 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
}
mExtractedDurationUs = totalDuration;
videoData->SetDuration((uint64_t)duration.value());
videoData->mDuration = (uint64_t)duration.value();
videoData->SwapInFrameData(frameData);
VP8LOG(LogLevel::Verbose,
"GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64
", FrameType %d",
videoData->GetTimeStamp(), videoData->GetDuration(),
videoData->GetFrameType());
aData.AppendEncodedFrame(videoData);
videoData->mTime, videoData->mDuration, videoData->mFrameType);
aData.AppendElement(videoData);
}
return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
@ -440,7 +440,8 @@ VP8TrackEncoder::EncodeOperation VP8TrackEncoder::GetNextEncodeOperation(
* encode it.
* 4. Remove the encoded chunks in mSourceSegment after for-loop.
*/
nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
nsresult VP8TrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
AUTO_PROFILER_LABEL("VP8TrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -508,7 +509,7 @@ nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
// because this frame will be skipped.
VP8LOG(LogLevel::Warning,
"MediaRecorder lagging behind. Skipping a frame.");
RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
RefPtr<EncodedFrame> last = aData.LastElement();
if (last) {
mExtractedDuration += chunk.mDuration;
if (!mExtractedDuration.isValid()) {
@ -524,8 +525,7 @@ nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
NS_ERROR("skipped duration overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
last->SetDuration(last->GetDuration() +
(static_cast<uint64_t>(skippedDuration.value())));
last->mDuration += static_cast<uint64_t>(skippedDuration.value());
}
}
@ -569,3 +569,5 @@ nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
}
} // namespace mozilla
#undef VP8LOG


@ -33,7 +33,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() final;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) final;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) final;
protected:
nsresult Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
@ -49,7 +49,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
// null for EOS detection.
// NS_OK if some data was appended to aData.
// An error nsresult otherwise.
nsresult GetEncodedPartitions(EncodedFrameContainer& aData);
nsresult GetEncodedPartitions(nsTArray<RefPtr<EncodedFrame>>& aData);
// Prepare the input data to the mVPXImageWrapper for encoding.
nsresult PrepareRawFrame(VideoChunk& aChunk);


@ -7,7 +7,7 @@ with Files('*'):
EXPORTS += [
'ContainerWriter.h',
'EncodedFrameContainer.h',
'EncodedFrame.h',
'MediaEncoder.h',
'OpusTrackEncoder.h',
'TrackEncoder.h',
@ -16,6 +16,7 @@ EXPORTS += [
UNIFIED_SOURCES += [
'MediaEncoder.cpp',
'Muxer.cpp',
'OpusTrackEncoder.cpp',
'TrackEncoder.cpp',
]


@ -0,0 +1,25 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#include "AudioGenerator.h"
#include "AudioSegment.h"
using namespace mozilla;
AudioGenerator::AudioGenerator(int32_t aChannels, int32_t aSampleRate)
: mGenerator(aSampleRate, 1000), mChannels(aChannels) {}
void AudioGenerator::Generate(AudioSegment& aSegment, const int32_t& aSamples) {
RefPtr<SharedBuffer> buffer =
SharedBuffer::Create(aSamples * sizeof(int16_t));
int16_t* dest = static_cast<int16_t*>(buffer->Data());
mGenerator.generate(dest, aSamples);
AutoTArray<const int16_t*, 1> channels;
for (int32_t i = 0; i < mChannels; i++) {
channels.AppendElement(dest);
}
aSegment.AppendFrames(buffer.forget(), channels, aSamples,
PRINCIPAL_HANDLE_NONE);
}


@ -0,0 +1,25 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_
#define DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_
#include "prtime.h"
#include "SineWaveGenerator.h"
namespace mozilla {
class AudioSegment;
}
class AudioGenerator {
public:
AudioGenerator(int32_t aChannels, int32_t aSampleRate);
void Generate(mozilla::AudioSegment& aSegment, const int32_t& aSamples);
private:
mozilla::SineWaveGenerator mGenerator;
const int32_t mChannels;
};
#endif // DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_
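// Illustrative sketch, not part of this patch - typical use of the helper
// above (its constructor wires up a 1 kHz SineWaveGenerator at the given
// rate):
//
//   AudioGenerator generator(1 /* channels */, 44100 /* sample rate */);
//   mozilla::AudioSegment segment;
//   generator.Generate(segment, 44100);  // one second of mono 1 kHz sine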


@ -2,7 +2,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#define ENABLE_SET_CUBEB_BACKEND 1
#include "CubebUtils.h"
#include "GraphDriver.h"
#include "MediaTrackGraphImpl.h"
@ -26,9 +26,9 @@ RefPtr<MediaTrackGraphImpl> MakeMTGImpl() {
}
TEST(TestAudioCallbackDriver, StartStop)
{
MOZ_CAN_RUN_SCRIPT_FOR_DEFINITION {
MockCubeb* mock = new MockCubeb();
mozilla::CubebUtils::ForceSetCubebContext(mock->AsCubebContext());
CubebUtils::ForceSetCubebContext(mock->AsCubebContext());
RefPtr<MediaTrackGraphImpl> graph = MakeMTGImpl();
EXPECT_TRUE(!!graph->mDriver) << "AudioCallbackDriver created.";
@ -44,7 +44,7 @@ TEST(TestAudioCallbackDriver, StartStop)
EXPECT_TRUE(driver->IsStarted()) << "Verify thread is started";
// This will block until all events have been executed.
driver->AsAudioCallbackDriver()->Shutdown();
MOZ_KnownLive(driver->AsAudioCallbackDriver())->Shutdown();
EXPECT_FALSE(driver->ThreadRunning()) << "Verify thread is not running";
EXPECT_FALSE(driver->IsStarted()) << "Verify thread is not started";
@ -53,5 +53,6 @@ TEST(TestAudioCallbackDriver, StartStop)
// block forever if it was not cleared. The same logic exists in
// MediaTrackGraphShutDownRunnable
graph->mDriver = nullptr;
graph->RemoveShutdownBlocker();
}
#undef ENABLE_SET_CUBEB_BACKEND


@ -85,7 +85,7 @@ TEST(AudioMixer, Test)
{
int iterations = 2;
mozilla::AudioMixer mixer;
mixer.AddCallback(&consumer);
mixer.AddCallback(WrapNotNull(&consumer));
fprintf(stderr, "Test AudioMixer constant buffer length.\n");
@ -98,7 +98,7 @@ TEST(AudioMixer, Test)
{
mozilla::AudioMixer mixer;
mixer.AddCallback(&consumer);
mixer.AddCallback(WrapNotNull(&consumer));
fprintf(stderr, "Test AudioMixer variable buffer length.\n");
@ -136,7 +136,7 @@ TEST(AudioMixer, Test)
{
mozilla::AudioMixer mixer;
mixer.AddCallback(&consumer);
mixer.AddCallback(WrapNotNull(&consumer));
fprintf(stderr, "Test AudioMixer variable channel count.\n");
@ -153,7 +153,7 @@ TEST(AudioMixer, Test)
{
mozilla::AudioMixer mixer;
mixer.AddCallback(&consumer);
mixer.AddCallback(WrapNotNull(&consumer));
fprintf(stderr, "Test AudioMixer variable stream count.\n");
mixer.Mix(a, 2, CHANNEL_LENGTH, AUDIO_RATE);


@ -4,33 +4,11 @@
#include "gtest/gtest.h"
#include "OpusTrackEncoder.h"
#include "SineWaveGenerator.h"
#include "AudioGenerator.h"
using namespace mozilla;
class AudioGenerator {
public:
AudioGenerator(int32_t aChannels, int32_t aSampleRate)
: mGenerator(aSampleRate, 1000), mChannels(aChannels) {}
void Generate(AudioSegment& aSegment, const int32_t& aSamples) {
RefPtr<SharedBuffer> buffer =
SharedBuffer::Create(aSamples * sizeof(int16_t));
int16_t* dest = static_cast<int16_t*>(buffer->Data());
mGenerator.generate(dest, aSamples);
AutoTArray<const int16_t*, 1> channels;
for (int32_t i = 0; i < mChannels; i++) {
channels.AppendElement(dest);
}
aSegment.AppendFrames(buffer.forget(), channels, aSamples,
PRINCIPAL_HANDLE_NONE);
}
private:
SineWaveGenerator mGenerator;
const int32_t mChannels;
};
class TestOpusTrackEncoder : public OpusTrackEncoder {
public:
TestOpusTrackEncoder() : OpusTrackEncoder(90000) {}
@ -222,13 +200,13 @@ TEST(OpusAudioTrackEncoder, FrameEncode)
encoder.AppendAudioSegment(std::move(segment));
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
// Verify that encoded data is 5 seconds long.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
// 44100 as used above gets resampled to 48000 for opus.
const uint64_t five = 48000 * 5;


@ -25,57 +25,142 @@
} \
} while (0)
#define BLOCK_SIZE 64
#define WIDTH 640
#define HEIGHT 480
#define NUM_FRAMES 150UL
#define FRAME_RATE 30
#define FRAME_DURATION (1000000 / FRAME_RATE)
#define BIT_RATE (1000 * 1000) // 1Mbps
#define KEYFRAME_INTERVAL FRAME_RATE // 1 keyframe per second
using namespace mozilla;
static gfx::IntSize kImageSize(640, 480);
static gfx::IntSize kImageSize(WIDTH, HEIGHT);
class MediaDataEncoderTest : public testing::Test {
protected:
void SetUp() override { InitData(kImageSize); }
void SetUp() override { mData.Init(kImageSize); }
void TearDown() override { DeinitData(); }
void TearDown() override { mData.Deinit(); }
layers::PlanarYCbCrData mData;
UniquePtr<uint8_t[]> mBackBuffer;
public:
struct FrameSource final {
layers::PlanarYCbCrData mYUV;
UniquePtr<uint8_t[]> mBuffer;
RefPtr<layers::BufferRecycleBin> mRecycleBin;
int16_t mColorStep = 4;
private:
void InitData(const gfx::IntSize& aSize) {
mData.mPicSize = aSize;
mData.mYStride = aSize.width;
mData.mYSize = aSize;
mData.mCbCrStride = aSize.width / 2;
mData.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
size_t bufferSize = mData.mYStride * mData.mYSize.height +
mData.mCbCrStride * mData.mCbCrSize.height +
mData.mCbCrStride * mData.mCbCrSize.height;
mBackBuffer = MakeUnique<uint8_t[]>(bufferSize);
std::fill_n(mBackBuffer.get(), bufferSize, 42);
mData.mYChannel = mBackBuffer.get();
mData.mCbChannel = mData.mYChannel + mData.mYStride * mData.mYSize.height;
mData.mCrChannel =
mData.mCbChannel + mData.mCbCrStride * mData.mCbCrSize.height;
}
void Init(const gfx::IntSize& aSize) {
mYUV.mPicSize = aSize;
mYUV.mYStride = aSize.width;
mYUV.mYSize = aSize;
mYUV.mCbCrStride = aSize.width / 2;
mYUV.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
size_t bufferSize = mYUV.mYStride * mYUV.mYSize.height +
mYUV.mCbCrStride * mYUV.mCbCrSize.height +
mYUV.mCbCrStride * mYUV.mCbCrSize.height;
mBuffer = MakeUnique<uint8_t[]>(bufferSize);
std::fill_n(mBuffer.get(), bufferSize, 0x7F);
mYUV.mYChannel = mBuffer.get();
mYUV.mCbChannel = mYUV.mYChannel + mYUV.mYStride * mYUV.mYSize.height;
mYUV.mCrChannel =
mYUV.mCbChannel + mYUV.mCbCrStride * mYUV.mCbCrSize.height;
mRecycleBin = new layers::BufferRecycleBin();
}
void DeinitData() { mBackBuffer.reset(); }
void Deinit() {
mBuffer.reset();
mRecycleBin = nullptr;
}
already_AddRefed<MediaData> GetFrame(const size_t aIndex) {
Draw(aIndex);
RefPtr<layers::PlanarYCbCrImage> img =
new layers::RecyclingPlanarYCbCrImage(mRecycleBin);
img->CopyData(mYUV);
RefPtr<MediaData> frame = VideoData::CreateFromImage(
kImageSize, 0, TimeUnit::FromMicroseconds(aIndex * FRAME_DURATION),
TimeUnit::FromMicroseconds(FRAME_DURATION), img, (aIndex & 0xF) == 0,
TimeUnit::FromMicroseconds(aIndex * FRAME_DURATION));
return frame.forget();
}
void DrawChessboard(uint8_t* aAddr, const size_t aWidth,
const size_t aHeight, const size_t aOffset) {
uint8_t pixels[2][BLOCK_SIZE];
size_t x = aOffset % BLOCK_SIZE;
if ((aOffset / BLOCK_SIZE) & 1) {
x = BLOCK_SIZE - x;
}
for (size_t i = 0; i < x; i++) {
pixels[0][i] = 0x00;
pixels[1][i] = 0xFF;
}
for (size_t i = x; i < BLOCK_SIZE; i++) {
pixels[0][i] = 0xFF;
pixels[1][i] = 0x00;
}
uint8_t* p = aAddr;
for (size_t row = 0; row < aHeight; row++) {
for (size_t col = 0; col < aWidth; col += BLOCK_SIZE) {
memcpy(p, pixels[((row / BLOCK_SIZE) + (col / BLOCK_SIZE)) % 2],
BLOCK_SIZE);
p += BLOCK_SIZE;
}
}
}
void Draw(const size_t aIndex) {
DrawChessboard(mYUV.mYChannel, mYUV.mYSize.width, mYUV.mYSize.height,
aIndex << 1);
int16_t color = mYUV.mCbChannel[0] + mColorStep;
if (color > 255 || color < 0) {
mColorStep = -mColorStep;
color = mYUV.mCbChannel[0] + mColorStep;
}
size_t size = (mYUV.mCrChannel - mYUV.mCbChannel);
std::fill_n(mYUV.mCbChannel, size, static_cast<uint8_t>(color));
std::fill_n(mYUV.mCrChannel, size, 0xFF - static_cast<uint8_t>(color));
}
};
public:
FrameSource mData;
};
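// For reference (not part of this patch): under the constants at the top
// of the file, FrameSource::GetFrame() produces frames of
// FRAME_DURATION = 1000000 / 30 us each, and (aIndex & 0xF) == 0 marks
// every 16th frame as a keyframe, so NUM_FRAMES = 150 yields keyframes at
// indices 0, 16, 32, ..., 144.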
static already_AddRefed<MediaDataEncoder> CreateH264Encoder(
MediaDataEncoder::Usage aUsage,
MediaDataEncoder::PixelFormat aPixelFormat) {
MediaDataEncoder::Usage aUsage, MediaDataEncoder::PixelFormat aPixelFormat,
const Maybe<MediaDataEncoder::H264Specific>& aSpecific =
Some(MediaDataEncoder::H264Specific(
KEYFRAME_INTERVAL,
MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel))) {
RefPtr<PEMFactory> f(new PEMFactory());
if (!f->SupportsMimeType(NS_LITERAL_CSTRING(VIDEO_MP4))) {
return nullptr;
}
VideoInfo videoInfo(1280, 720);
VideoInfo videoInfo(WIDTH, HEIGHT);
videoInfo.mMimeType = NS_LITERAL_CSTRING(VIDEO_MP4);
const RefPtr<TaskQueue> taskQueue(
new TaskQueue(GetMediaThreadPool(MediaThreadType::PLAYBACK)));
CreateEncoderParams c(videoInfo /* track info */, aUsage, taskQueue,
aPixelFormat, 30 /* FPS */,
10 * 1024 * 1024 /* bitrate */);
return f->CreateEncoder(c);
RefPtr<MediaDataEncoder> e;
if (aSpecific) {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, BIT_RATE /* bitrate */, aSpecific.value()));
} else {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, BIT_RATE /* bitrate */));
}
return e.forget();
}
void WaitForShutdown(RefPtr<MediaDataEncoder> aEncoder) {
@ -125,6 +210,22 @@ static bool EnsureInit(RefPtr<MediaDataEncoder> aEncoder) {
return succeeded;
}
TEST_F(MediaDataEncoderTest, H264InitWithoutSpecific) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);
RefPtr<MediaDataEncoder> e =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, Nothing());
#if defined(MOZ_WIDGET_ANDROID) // Android encoder requires I-frame interval
EXPECT_FALSE(EnsureInit(e));
#else
EXPECT_TRUE(EnsureInit(e));
#endif
WaitForShutdown(e);
}
TEST_F(MediaDataEncoderTest, H264Init) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);
@ -139,17 +240,11 @@ TEST_F(MediaDataEncoderTest, H264Init) {
static MediaDataEncoder::EncodedData Encode(
const RefPtr<MediaDataEncoder> aEncoder, const size_t aNumFrames,
const layers::PlanarYCbCrData& aYCbCrData) {
MediaDataEncoderTest::FrameSource& aSource) {
MediaDataEncoder::EncodedData output;
bool succeeded;
for (size_t i = 0; i < aNumFrames; i++) {
RefPtr<layers::PlanarYCbCrImage> img =
new layers::RecyclingPlanarYCbCrImage(new layers::BufferRecycleBin());
img->AdoptData(aYCbCrData);
RefPtr<MediaData> frame = VideoData::CreateFromImage(
kImageSize, 0, TimeUnit::FromMicroseconds(i * 30000),
TimeUnit::FromMicroseconds(30000), img, (i & 0xF) == 0,
TimeUnit::FromMicroseconds(i * 30000));
RefPtr<MediaData> frame = aSource.GetFrame(i);
media::Await(
GetMediaThreadPool(MediaThreadType::PLAYBACK), aEncoder->Encode(frame),
[&output, &succeeded](MediaDataEncoder::EncodedData encoded) {
@ -164,29 +259,20 @@ static MediaDataEncoder::EncodedData Encode(
}
size_t pending = 0;
media::Await(
GetMediaThreadPool(MediaThreadType::PLAYBACK), aEncoder->Drain(),
[&pending, &output, &succeeded](MediaDataEncoder::EncodedData encoded) {
pending = encoded.Length();
output.AppendElements(std::move(encoded));
succeeded = true;
},
[&succeeded](MediaResult r) { succeeded = false; });
EXPECT_TRUE(succeeded);
if (!succeeded) {
return output;
}
if (pending > 0) {
do {
media::Await(
GetMediaThreadPool(MediaThreadType::PLAYBACK), aEncoder->Drain(),
[&succeeded](MediaDataEncoder::EncodedData encoded) {
EXPECT_EQ(encoded.Length(), 0UL);
[&pending, &output, &succeeded](MediaDataEncoder::EncodedData encoded) {
pending = encoded.Length();
output.AppendElements(std::move(encoded));
succeeded = true;
},
[&succeeded](MediaResult r) { succeeded = false; });
EXPECT_TRUE(succeeded);
}
if (!succeeded) {
return output;
}
} while (pending > 0);
return output;
}
@ -214,8 +300,8 @@ TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAnnexB) {
MediaDataEncoder::PixelFormat::YUV420P);
EnsureInit(e);
MediaDataEncoder::EncodedData output = Encode(e, 30UL, mData);
EXPECT_EQ(output.Length(), 30UL);
MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
for (auto frame : output) {
EXPECT_TRUE(AnnexB::IsAnnexB(frame));
}
@ -230,12 +316,12 @@ TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAVCC) {
MediaDataEncoder::Usage::Record, MediaDataEncoder::PixelFormat::YUV420P);
EnsureInit(e);
MediaDataEncoder::EncodedData output = Encode(e, 30UL, mData);
EXPECT_EQ(output.Length(), 30UL);
MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
AnnexB::IsAVCC(output[0]); // Only 1st frame has extra data.
for (auto frame : output) {
EXPECT_FALSE(AnnexB::IsAnnexB(frame));
}
WaitForShutdown(e);
}
}


@ -0,0 +1,212 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <vector>
#include "ContainerWriter.h"
#include "EncodedFrame.h"
#include "gtest/gtest.h"
#include "gmock/gmock.h"
#include "Muxer.h"
#include "OpusTrackEncoder.h"
#include "WebMWriter.h"
using namespace mozilla;
using testing::_;
using testing::ElementsAre;
using testing::Return;
using testing::StaticAssertTypeEq;
static RefPtr<TrackMetadataBase> CreateOpusMetadata(int32_t aChannels,
float aSamplingFrequency,
size_t aIdHeaderSize,
size_t aCommentHeaderSize) {
auto opusMetadata = MakeRefPtr<OpusMetadata>();
opusMetadata->mChannels = aChannels;
opusMetadata->mSamplingFrequency = aSamplingFrequency;
opusMetadata->mIdHeader.SetLength(aIdHeaderSize);
for (size_t i = 0; i < opusMetadata->mIdHeader.Length(); i++) {
opusMetadata->mIdHeader[i] = 0;
}
opusMetadata->mCommentHeader.SetLength(aCommentHeaderSize);
for (size_t i = 0; i < opusMetadata->mCommentHeader.Length(); i++) {
opusMetadata->mCommentHeader[i] = 0;
}
return opusMetadata;
}
static RefPtr<TrackMetadataBase> CreateVP8Metadata(int32_t aWidth,
int32_t aHeight) {
auto vp8Metadata = MakeRefPtr<VP8Metadata>();
vp8Metadata->mWidth = aWidth;
vp8Metadata->mDisplayWidth = aWidth;
vp8Metadata->mHeight = aHeight;
vp8Metadata->mDisplayHeight = aHeight;
return vp8Metadata;
}
static RefPtr<EncodedFrame> CreateFrame(EncodedFrame::FrameType aType,
uint64_t aTimeUs, uint64_t aDurationUs,
size_t aDataSize) {
auto frame = MakeRefPtr<EncodedFrame>();
frame->mTime = aTimeUs;
if (aType == EncodedFrame::OPUS_AUDIO_FRAME) {
// Opus duration is in samples, so figure out how many samples will put us
// closest to aDurationUs without going over.
frame->mDuration = UsecsToFrames(aDurationUs, 48000).value();
} else {
frame->mDuration = aDurationUs;
}
frame->mFrameType = aType;
nsTArray<uint8_t> data;
data.SetLength(aDataSize);
frame->SwapInFrameData(data);
return frame;
}
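// Worked instance of the duration conversion above (not part of this
// patch): CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48000, ...)
// stores UsecsToFrames(48000, 48000) = 48000 * 48000 / 1000000 = 2304
// samples in mDuration, while non-Opus (video) frames keep mDuration in
// microseconds.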
namespace testing {
namespace internal {
// This makes the googletest framework treat nsTArray as an std::vector, so all
// the regular Matchers (like ElementsAre) work for it.
template <typename Element>
class StlContainerView<nsTArray<Element>> {
public:
typedef GTEST_REMOVE_CONST_(Element) RawElement;
typedef std::vector<RawElement> type;
typedef const type const_reference;
static const_reference ConstReference(const nsTArray<Element>& aContainer) {
StaticAssertTypeEq<Element, RawElement>();
return type(aContainer.begin(), aContainer.end());
}
static type Copy(const nsTArray<Element>& aContainer) {
return type(aContainer.begin(), aContainer.end());
}
};
} // namespace internal
} // namespace testing
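// With the adapter above, gmock's container matchers accept nsTArray
// directly - e.g., hypothetically:
//   EXPECT_THAT(nsTArray<int>({1, 2, 3}), testing::ElementsAre(1, 2, 3));
// This is what the SetMetadata/WriteEncodedTrack expectations below rely on.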
class MockContainerWriter : public ContainerWriter {
public:
MOCK_METHOD2(WriteEncodedTrack,
nsresult(const nsTArray<RefPtr<EncodedFrame>>&, uint32_t));
MOCK_METHOD1(SetMetadata,
nsresult(const nsTArray<RefPtr<TrackMetadataBase>>&));
MOCK_METHOD0(IsWritingComplete, bool());
MOCK_METHOD2(GetContainerData,
nsresult(nsTArray<nsTArray<uint8_t>>*, uint32_t));
};
TEST(MuxerTest, AudioOnly)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto audioFrame = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48000, 4096);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(audioFrame),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(nsTArray<RefPtr<TrackMetadataBase>>({opusMeta})),
NS_OK);
muxer.AddEncodedAudioFrame(audioFrame);
muxer.AudioEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}
TEST(MuxerTest, AudioVideo)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto vp8Meta = CreateVP8Metadata(640, 480);
auto audioFrame = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48000, 4096);
auto videoFrame = CreateFrame(EncodedFrame::VP8_I_FRAME, 0, 50000, 65536);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta, vp8Meta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(videoFrame, audioFrame),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(
nsTArray<RefPtr<TrackMetadataBase>>({opusMeta, vp8Meta})),
NS_OK);
muxer.AddEncodedAudioFrame(audioFrame);
muxer.AudioEndOfStream();
muxer.AddEncodedVideoFrame(videoFrame);
muxer.VideoEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}
TEST(MuxerTest, AudioVideoOutOfOrder)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto vp8Meta = CreateVP8Metadata(640, 480);
auto a0 = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48, 4096);
auto v0 = CreateFrame(EncodedFrame::VP8_I_FRAME, 0, 50, 65536);
auto a48 = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 48, 48, 4096);
auto v50 = CreateFrame(EncodedFrame::VP8_I_FRAME, 50, 50, 65536);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta, vp8Meta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(v0, a0, a48, v50),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(
nsTArray<RefPtr<TrackMetadataBase>>({opusMeta, vp8Meta})),
NS_OK);
muxer.AddEncodedAudioFrame(a0);
muxer.AddEncodedVideoFrame(v0);
muxer.AddEncodedVideoFrame(v50);
muxer.VideoEndOfStream();
muxer.AddEncodedAudioFrame(a48);
muxer.AudioEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}


@ -143,8 +143,8 @@ TEST(VP8VideoTrackEncoder, FrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(images.Length()));
// Pull Encoded Data back from encoder.
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
}
// Test that encoding a single frame gives useful output.
@ -165,21 +165,20 @@ TEST(VP8VideoTrackEncoder, SingleFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Read out encoded data, and verify.
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
const size_t oneElement = 1;
ASSERT_EQ(oneElement, frames.Length());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType())
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType)
<< "We only have one frame, so it should be a keyframe";
const uint64_t halfSecond = PR_USEC_PER_SEC / 2;
EXPECT_EQ(halfSecond, frames[0]->GetDuration());
EXPECT_EQ(halfSecond, frames[0]->mDuration);
}
// Test that encoding a couple of identical images gives useful output.
@ -204,15 +203,15 @@ TEST(VP8VideoTrackEncoder, SameFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1.5s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
EXPECT_EQ(oneAndAHalf, totalDuration);
@ -240,15 +239,15 @@ TEST(VP8VideoTrackEncoder, SkippedFrames)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(100));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 100 * 1ms = 100ms.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t hundredMillis = PR_USEC_PER_SEC / 10;
EXPECT_EQ(hundredMillis, totalDuration);
@ -282,15 +281,15 @@ TEST(VP8VideoTrackEncoder, RoundingErrorFramesEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t oneSecond = PR_USEC_PER_SEC;
EXPECT_EQ(oneSecond, totalDuration);
@ -319,8 +318,8 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -331,9 +330,9 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
(PR_USEC_PER_SEC / 10)};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : container.GetEncodedFrames()) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->mDuration);
totalDuration += frame->mDuration;
}
const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
EXPECT_EQ(pointThree, totalDuration);
@ -368,8 +367,8 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -380,9 +379,9 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
(PR_USEC_PER_SEC / 10) * 2};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : container.GetEncodedFrames()) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->mDuration);
totalDuration += frame->mDuration;
}
const uint64_t pointSix = (PR_USEC_PER_SEC / 10) * 6;
EXPECT_EQ(pointSix, totalDuration);
@ -433,18 +432,18 @@ TEST(VP8VideoTrackEncoder, Suspended)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 0.2s.
const uint64_t two = 2;
EXPECT_EQ(two, container.GetEncodedFrames().Length());
EXPECT_EQ(two, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t pointTwo = (PR_USEC_PER_SEC / 10) * 2;
EXPECT_EQ(pointTwo, totalDuration);
@ -483,18 +482,18 @@ TEST(VP8VideoTrackEncoder, SuspendedUntilEnd)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frame and a total duration of 0.1s.
const uint64_t one = 1;
EXPECT_EQ(one, container.GetEncodedFrames().Length());
EXPECT_EQ(one, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t pointOne = PR_USEC_PER_SEC / 10;
EXPECT_EQ(pointOne, totalDuration);
@ -522,14 +521,14 @@ TEST(VP8VideoTrackEncoder, AlwaysSuspended)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have no encoded frames.
const uint64_t none = 0;
EXPECT_EQ(none, container.GetEncodedFrames().Length());
EXPECT_EQ(none, frames.Length());
}
// Test that encoding a track that is suspended in the beginning works.
@ -566,18 +565,18 @@ TEST(VP8VideoTrackEncoder, SuspendedBeginning)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frame and a total duration of 0.5s.
const uint64_t one = 1;
EXPECT_EQ(one, container.GetEncodedFrames().Length());
EXPECT_EQ(one, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -619,18 +618,18 @@ TEST(VP8VideoTrackEncoder, SuspendedOverlap)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 1.2s.
const uint64_t two = 2;
ASSERT_EQ(two, container.GetEncodedFrames().Length());
ASSERT_EQ(two, frames.Length());
const uint64_t pointFive = (PR_USEC_PER_SEC / 10) * 5;
EXPECT_EQ(pointFive, container.GetEncodedFrames()[0]->GetDuration());
EXPECT_EQ(pointFive, frames[0]->mDuration);
const uint64_t pointSeven = (PR_USEC_PER_SEC / 10) * 7;
EXPECT_EQ(pointSeven, container.GetEncodedFrames()[1]->GetDuration());
EXPECT_EQ(pointSeven, frames[1]->mDuration);
}
// Test that ending a track in the middle of already pushed data works.
@ -651,14 +650,14 @@ TEST(VP8VideoTrackEncoder, PrematureEnding)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -683,14 +682,14 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -716,14 +715,14 @@ TEST(VP8VideoTrackEncoder, DelayedStartOtherEventOrder)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -748,14 +747,14 @@ TEST(VP8VideoTrackEncoder, VeryDelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(10.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -785,34 +784,34 @@ TEST(VP8VideoTrackEncoder, LongFramesReEncoded)
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_FALSE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t oneSec = PR_USEC_PER_SEC;
EXPECT_EQ(oneSec, totalDuration);
EXPECT_EQ(1U, container.GetEncodedFrames().Length());
EXPECT_EQ(1U, frames.Length());
}
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(11));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
for (auto& frame : frames) {
totalDuration += frame->mDuration;
}
const uint64_t tenSec = PR_USEC_PER_SEC * 10;
EXPECT_EQ(tenSec, totalDuration);
EXPECT_EQ(10U, container.GetEncodedFrames().Length());
EXPECT_EQ(10U, frames.Length());
}
}
@ -853,37 +852,36 @@ TEST(VP8VideoTrackEncoder, ShortKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
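// The expected durations below are written as PR_USEC_PER_SEC / 1000 * N,
// i.e. N milliseconds expressed in microseconds.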
// [0, 400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 400UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 400UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
// [400ms, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
// [600ms, 750ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[2]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[2]->mFrameType);
// [750ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->mFrameType);
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
// [1100ms, 1200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
}
// Test that an encoding with a defined key frame interval encodes keyframes
@ -923,37 +921,36 @@ TEST(VP8VideoTrackEncoder, LongKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
// [600ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
// [1100ms, 1900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->mFrameType);
// [1900ms, 2100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
// [2100ms, 2200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
}
// Test that an encoding with no defined key frame interval encodes keyframes
@ -991,37 +988,36 @@ TEST(VP8VideoTrackEncoder, DefaultKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
// [600ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
// [1100ms, 1900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->mFrameType);
// [1900ms, 2100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
// [2100ms, 2200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
}
// Test that an encoding where the key frame interval is updated dynamically
@ -1031,7 +1027,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Set keyframe interval to 100ms.
@ -1080,7 +1076,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 501ms, so the first bit of the frame starting at 500ms is
// included.
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
{
VideoSegment segment;
@ -1106,7 +1102,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 2000ms from 501ms to 2501ms
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(2501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
{
VideoSegment segment;
@ -1130,68 +1126,67 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(14UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
// [100ms, 120ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 20UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[1]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 20UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[1]->mFrameType);
// [120ms, 130ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 10UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 10UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
// [130ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 70UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 70UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->mFrameType);
// [200ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[4]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[4]->mFrameType);
// [300ms, 500ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
// [500ms, 1300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[6]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[6]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[6]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[6]->mFrameType);
// [1300ms, 1400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[7]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[7]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[7]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[7]->mFrameType);
// [1400ms, 2400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 1000UL, frames[8]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[8]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 1000UL, frames[8]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[8]->mFrameType);
// [2400ms, 2500ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[9]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[9]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[9]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[9]->mFrameType);
// [2500ms, 2600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[10]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[10]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[10]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[10]->mFrameType);
// [2600ms, 2800ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[11]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[11]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[11]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[11]->mFrameType);
// [2800ms, 2900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[12]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[12]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[12]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[12]->mFrameType);
// [2900ms, 3000ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[13]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[13]->GetFrameType());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[13]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[13]->mFrameType);
}
// Test that an encoding which is disabled on a frame timestamp encodes
@ -1201,7 +1196,7 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1226,17 +1221,16 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
encoder.Disable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->mDuration);
}
// Test that an encoding which is disabled between two frame timestamps encodes
@ -1246,7 +1240,7 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1268,20 +1262,19 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
encoder.Disable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->mDuration);
// [50ms, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
}
// Test that an encoding which is enabled on a frame timestamp encodes
@ -1291,7 +1284,7 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1318,17 +1311,16 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
encoder.Enable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->mDuration);
}
// Test that an encoding which is enabled between two frame timestamps encodes
@ -1338,7 +1330,7 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1362,20 +1354,19 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
encoder.Enable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->mDuration);
// [50ms, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
}
// Test that making time go backwards removes any future frames in the encoder.
@ -1384,7 +1375,7 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1431,23 +1422,22 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(4UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
// [100ms, 150ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
// [150ms, 250ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
// [250ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[3]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[3]->mDuration);
}
// Test that trying to encode a null image removes any future frames in the
@ -1457,7 +1447,7 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1504,20 +1494,19 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
// [100ms, 250ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[1]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[1]->mDuration);
// [250ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[2]->GetDuration());
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[2]->mDuration);
}
// EOS test
@ -1531,8 +1520,8 @@ TEST(VP8VideoTrackEncoder, EncodeComplete)
// Pull encoded data back from the encoder. Since we have sent EOS to the
// encoder, encoder.GetEncodedTrack should return NS_OK immediately.
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
}
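For reference, every test above shares the same accounting pattern; a minimal sketch, assuming only the GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>&) signature and the EncodedFrame members (mDuration, mTime, mFrameType) used throughout this diff (expectedMicroseconds is a placeholder):

nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
uint64_t totalDuration = 0;
for (const auto& frame : frames) {
  // mDuration is expressed in microseconds (PR_USEC_PER_SEC units).
  totalDuration += frame->mDuration;
}
EXPECT_EQ(expectedMicroseconds, totalDuration);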

View File

@ -39,28 +39,30 @@ class WebMVP8TrackEncoder : public VP8TrackEncoder {
}
};
static void GetOpusMetadata(int aChannels, int aSampleRate,
TrackRate aTrackRate,
nsTArray<RefPtr<TrackMetadataBase>>& aMeta) {
WebMOpusTrackEncoder opusEncoder(aTrackRate);
EXPECT_TRUE(opusEncoder.TestOpusCreation(aChannels, aSampleRate));
aMeta.AppendElement(opusEncoder.GetMetadata());
}
static void GetVP8Metadata(int32_t aWidth, int32_t aHeight,
int32_t aDisplayWidth, int32_t aDisplayHeight,
TrackRate aTrackRate,
nsTArray<RefPtr<TrackMetadataBase>>& aMeta) {
WebMVP8TrackEncoder vp8Encoder;
EXPECT_TRUE(vp8Encoder.TestVP8Creation(aWidth, aHeight, aDisplayWidth,
aDisplayHeight));
aMeta.AppendElement(vp8Encoder.GetMetadata());
}
const uint64_t FIXED_DURATION = 1000000;  // 1 s expressed in microseconds
const uint32_t FIXED_FRAMESIZE = 500;     // size of the dummy frame payload in bytes
class TestWebMWriter : public WebMWriter {
public:
explicit TestWebMWriter(int aTrackTypes)
: WebMWriter(aTrackTypes), mTimestamp(0) {}
void SetOpusMetadata(int aChannels, int aSampleRate, TrackRate aTrackRate) {
WebMOpusTrackEncoder opusEncoder(aTrackRate);
EXPECT_TRUE(opusEncoder.TestOpusCreation(aChannels, aSampleRate));
RefPtr<TrackMetadataBase> opusMeta = opusEncoder.GetMetadata();
SetMetadata(opusMeta);
}
void SetVP8Metadata(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
int32_t aDisplayHeight, TrackRate aTrackRate) {
WebMVP8TrackEncoder vp8Encoder;
EXPECT_TRUE(vp8Encoder.TestVP8Creation(aWidth, aHeight, aDisplayWidth,
aDisplayHeight));
RefPtr<TrackMetadataBase> vp8Meta = vp8Encoder.GetMetadata();
SetMetadata(vp8Meta);
}
TestWebMWriter() : WebMWriter(), mTimestamp(0) {}
// When we append an I-Frame into WebM muxer, the muxer will treat previous
// data as "a cluster".
@ -68,22 +70,22 @@ class TestWebMWriter : public WebMWriter {
// previous cluster so that we can retrieve data by |GetContainerData|.
void AppendDummyFrame(EncodedFrame::FrameType aFrameType,
uint64_t aDuration) {
EncodedFrameContainer encodedVideoData;
nsTArray<RefPtr<EncodedFrame>> encodedVideoData;
nsTArray<uint8_t> frameData;
RefPtr<EncodedFrame> videoData = new EncodedFrame();
// Create dummy frame data.
frameData.SetLength(FIXED_FRAMESIZE);
videoData->SetFrameType(aFrameType);
videoData->SetTimeStamp(mTimestamp);
videoData->SetDuration(aDuration);
videoData->mFrameType = aFrameType;
videoData->mTime = mTimestamp;
videoData->mDuration = aDuration;
videoData->SwapInFrameData(frameData);
encodedVideoData.AppendEncodedFrame(videoData);
encodedVideoData.AppendElement(videoData);
WriteEncodedTrack(encodedVideoData, 0);
mTimestamp += aDuration;
}
bool HaveValidCluster() {
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
GetContainerData(&encodedBuf, 0);
return encodedBuf.Length() > 0;
}
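Taken together, the helpers above give the following end-to-end flow; a hypothetical sketch (the argument values are illustrative, the signatures are as introduced in this diff):

TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
GetOpusMetadata(/* aChannels */ 1, /* aSampleRate */ 48000,
                /* aTrackRate */ 90000, meta);
GetVP8Metadata(/* width, height */ 640, 480, /* display */ 640, 480, 90000, meta);
writer.SetMetadata(meta);  // hand both tracks to the muxer in one call
nsTArray<nsTArray<uint8_t>> header;
writer.GetContainerData(&header, ContainerWriter::GET_HEADER);
EXPECT_TRUE(header.Length() > 0);  // header data exists once both tracks are set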
@ -95,35 +97,32 @@ class TestWebMWriter : public WebMWriter {
TEST(WebMWriter, Metadata)
{
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
TestWebMWriter writer;
// The output should be empty since we didn't set any metadata in the writer.
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() == 0);
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() == 0);
// Set opus metadata.
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
int channel = 1;
int sampleRate = 44100;
TrackRate aTrackRate = 90000;
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// No output data since the writer has not been given both the audio and
// video metadata yet.
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() == 0);
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() == 0);
// Set vp8 metadata
// Get vp8 metadata
int32_t width = 640;
int32_t height = 480;
int32_t displayWidth = 640;
int32_t displayHeight = 480;
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
// Set metadata
writer.SetMetadata(meta);
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() > 0);
@ -131,21 +130,22 @@ TEST(WebMWriter, Metadata)
TEST(WebMWriter, Cluster)
{
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
int channel = 1;
int sampleRate = 48000;
TrackRate aTrackRate = 90000;
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Set vp8 metadata
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// Get vp8 metadata
int32_t width = 320;
int32_t height = 240;
int32_t displayWidth = 320;
int32_t displayHeight = 240;
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() > 0);
encodedBuf.Clear();
@ -173,19 +173,20 @@ TEST(WebMWriter, Cluster)
TEST(WebMWriter, FLUSH_NEEDED)
{
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
int channel = 2;
int sampleRate = 44100;
TrackRate aTrackRate = 100000;
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Set vp8 metadata
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// Get vp8 metadata
int32_t width = 176;
int32_t height = 352;
int32_t displayWidth = 176;
int32_t displayHeight = 352;
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
// Write the first I-Frame.
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
@ -198,7 +199,7 @@ TEST(WebMWriter, FLUSH_NEEDED)
// retrieved
EXPECT_FALSE(writer.HaveValidCluster());
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
// We get data because the ContainerWriter::FLUSH_NEEDED flag forces a flush.
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() > 0);
@ -293,19 +294,20 @@ static int64_t webm_tell(void* aUserData) {
TEST(WebMWriter, bug970774_aspect_ratio)
{
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
int channel = 1;
int sampleRate = 44100;
TrackRate aTrackRate = 90000;
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// Set vp8 metadata
int32_t width = 640;
int32_t height = 480;
int32_t displayWidth = 1280;
int32_t displayHeight = 960;
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
// Write the first I-Frame.
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
@ -314,7 +316,7 @@ TEST(WebMWriter, bug970774_aspect_ratio)
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
// Get the metadata and the first cluster.
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, 0);
// Flatten the encodedBuf.
WebMioData ioData;

View File

@ -4,6 +4,7 @@
include('/media/webrtc/webrtc.mozbuild')
DEFINES['ENABLE_SET_CUBEB_BACKEND'] = True
LOCAL_INCLUDES += [
'/media/webrtc/signaling/src/common',
@ -12,6 +13,7 @@ LOCAL_INCLUDES += [
]
UNIFIED_SOURCES += [
'AudioGenerator.cpp',
'MockMediaResource.cpp',
'TestAudioBuffers.cpp',
'TestAudioCallbackDriver.cpp',
@ -35,6 +37,7 @@ UNIFIED_SOURCES += [
'TestMediaSpan.cpp',
'TestMP3Demuxer.cpp',
'TestMP4Demuxer.cpp',
'TestMuxer.cpp',
'TestOpusParser.cpp',
'TestRust.cpp',
'TestTimeUnit.cpp',

View File

@ -34,19 +34,23 @@ CaptureTask::CaptureTask(dom::ImageCapture* aImageCapture)
mImageGrabbedOrTrackEnd(false),
mPrincipalChanged(false) {}
nsresult CaptureTask::TaskComplete(already_AddRefed<dom::Blob> aBlob,
nsresult CaptureTask::TaskComplete(already_AddRefed<dom::BlobImpl> aBlobImpl,
nsresult aRv) {
MOZ_ASSERT(NS_IsMainThread());
DetachTrack();
nsresult rv;
RefPtr<dom::Blob> blob(aBlob);
RefPtr<dom::BlobImpl> blobImpl(aBlobImpl);
// We have to re-create the Blob so that it is parented to a valid global.
if (blob) {
blob = dom::Blob::Create(mImageCapture->GetParentObject(), blob->Impl());
RefPtr<dom::Blob> blob;
if (blobImpl) {
blob = dom::Blob::Create(mImageCapture->GetOwnerGlobal(), blobImpl);
if (NS_WARN_IF(!blob)) {
return NS_ERROR_FAILURE;
}
}
if (mPrincipalChanged) {
@ -102,9 +106,10 @@ void CaptureTask::NotifyRealtimeTrackData(MediaTrackGraph* aGraph,
public:
explicit EncodeComplete(CaptureTask* aTask) : mTask(aTask) {}
nsresult ReceiveBlob(already_AddRefed<dom::Blob> aBlob) override {
RefPtr<dom::Blob> blob(aBlob);
mTask->TaskComplete(blob.forget(), NS_OK);
nsresult ReceiveBlobImpl(
already_AddRefed<dom::BlobImpl> aBlobImpl) override {
RefPtr<dom::BlobImpl> blobImpl(aBlobImpl);
mTask->TaskComplete(blobImpl.forget(), NS_OK);
mTask = nullptr;
return NS_OK;
}

View File

@ -12,7 +12,7 @@
namespace mozilla {
namespace dom {
class Blob;
class BlobImpl;
class ImageCapture;
class MediaStreamTrack;
} // namespace dom
@ -46,7 +46,8 @@ class CaptureTask : public DirectMediaTrackListener,
//
// Note:
// this function should be called on main thread.
nsresult TaskComplete(already_AddRefed<dom::Blob> aBlob, nsresult aRv);
nsresult TaskComplete(already_AddRefed<dom::BlobImpl> aBlobImpl,
nsresult aRv);
// Add listeners into MediaStreamTrack and PrincipalChangeObserver.
// It should be on main thread only.

View File

@ -491,6 +491,9 @@ nsresult DecodedStream::Start(const TimeUnit& aStartTime,
mVideoEndedPromise = mData->mVideoEndedPromise;
mOutputListener = mData->OnOutput().Connect(mOwnerThread, this,
&DecodedStream::NotifyOutput);
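// Seed the newly created audio track with the current volume.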
if (mData->mAudioTrack) {
mData->mAudioTrack->SetVolume(static_cast<float>(mVolume));
}
SendData();
}
return NS_OK;
@ -559,6 +562,9 @@ void DecodedStream::SetPlaying(bool aPlaying) {
void DecodedStream::SetVolume(double aVolume) {
AssertOwnerThread();
mVolume = aVolume;
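// Forward the volume to the audio track, which applies it at the output.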
if (mData && mData->mAudioTrack) {
mData->mAudioTrack->SetVolume(static_cast<float>(aVolume));
}
}
void DecodedStream::SetPlaybackRate(double aPlaybackRate) {
@ -623,8 +629,7 @@ static void SendStreamAudio(DecodedStreamData* aStream,
aStream->mNextAudioTime = audio->GetEndTime();
}
void DecodedStream::SendAudio(double aVolume,
const PrincipalHandle& aPrincipalHandle) {
void DecodedStream::SendAudio(const PrincipalHandle& aPrincipalHandle) {
AssertOwnerThread();
if (!mInfo.HasAudio()) {
@ -650,8 +655,6 @@ void DecodedStream::SendAudio(double aVolume,
aPrincipalHandle);
}
output.ApplyVolume(aVolume);
// |mNextAudioTime| is updated as we process each audio sample in
// SendStreamAudio().
if (output.GetDuration() > 0) {
@ -896,7 +899,7 @@ void DecodedStream::SendData() {
}
LOG_DS(LogLevel::Verbose, "SendData()");
SendAudio(mVolume, mPrincipalHandle);
SendAudio(mPrincipalHandle);
SendVideo(mPrincipalHandle);
}

View File

@ -67,7 +67,7 @@ class DecodedStream : public MediaSink {
private:
void DestroyData(UniquePtr<DecodedStreamData>&& aData);
void SendAudio(double aVolume, const PrincipalHandle& aPrincipalHandle);
void SendAudio(const PrincipalHandle& aPrincipalHandle);
void SendVideo(const PrincipalHandle& aPrincipalHandle);
void ResetAudio();
void ResetVideo(const PrincipalHandle& aPrincipalHandle);

View File

@ -56,6 +56,8 @@ support-files =
skip-if = toolkit == 'android' # Not supported on android
[test_AudioChange_mp4.html]
skip-if = toolkit == 'android' || (os == "win" && processor == "aarch64") # Not supported on android, aarch64 due to 1538331
[test_AudioChange_mp4_WebAudio.html]
skip-if = toolkit == 'android' || (os == "win" && processor == "aarch64") # Not supported on android, aarch64 due to 1538331
[test_AutoRevocation.html]
tags = firstpartyisolation
[test_BufferedSeek.html]

View File

@ -0,0 +1,55 @@
<!DOCTYPE HTML>
<html>
<head>
<title>MSE: basic functionality</title>
<script src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="mediasource.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
// This test checks that loading a stereo segment followed by a 5.1 segment
// plays without error when the audio is routed to an AudioContext.
const ac = new AudioContext();
runWithMSE(async (ms, el) => {
el.controls = true;
const source = ac.createMediaElementSource(el);
source.connect(ac.destination);
await once(ms, "sourceopen");
ok(true, "Receive a sourceopen event");
logEvents(el);
const audiosb = ms.addSourceBuffer("audio/mp4");
el.addEventListener("error", e => {
ok(false, `should not fire ${e.type} event`);
SimpleTest.finish();
});
is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
let p = once(el, "loadedmetadata");
await fetchAndLoad(audiosb, "aac20-48000-64000-", ["init"], ".mp4");
await p;
ok(true, "got loadedmetadata event");
p = Promise.all([once(el, "loadeddata"), once(el, "canplay")]);
await fetchAndLoad(audiosb, "aac20-48000-64000-", ["1"], ".m4s");
await p;
ok(true, "got canplay event");
el.play();
await fetchAndLoad(audiosb, "aac51-48000-128000-", ["init"], ".mp4");
await fetchAndLoad(audiosb, "aac51-48000-128000-", ["2"], ".m4s");
ms.endOfStream();
await once(el, "ended");
ok(el.currentTime >= 6, "played to the end");
SimpleTest.finish();
});
</script>
</pre>
</body>
</html>

View File

@ -1673,4 +1673,6 @@ bool SkeletonState::DecodeHeader(OggPacketPtr aPacket) {
return true;
}
#undef LOG
} // namespace mozilla

Some files were not shown because too many files have changed in this diff