WebRTC camera and vsock camera HAL

Add a camera that transfers the client's camera stream over WebRTC
and vsock to the CVD.
The camera stream originating from the client's getUserMedia() is
received by CameraStreamer, which acts as a sink for the WebRTC
video track. CameraStreamer then forwards video frames in YUV420
format to the CVD over a vsock connection.
The device control channel is used to transfer settings data (e.g.
frame rate and resolution) in JSON format to the CVD. An additional
WebRTC data channel is used to transfer binary blobs captured by
ImageCapture.takePhoto() to the CVD.
The custom camera HAL is loosely based on the ExternalCamera
implementation. The HAL supports only single-resolution blob and
YUV streams; the resolution used is the default provided by
getUserMedia().

Example usage:
TARGET_USE_VSOCK_CAMERA_HAL_IMPL=true m -j32
launch_cvd -start_webrtc=true -camera_server_port=7600

Notes:
- If the client connection is not established during boot, the
  camera app does not appear on the home screen. This is because
  CameraProvider only reports camera presence after the vsock
  connection is established.
- Video does not work if the camera resolution does not match the
  required video resolution; there is no scaling. For example, by
  default the camera app requires 1280x720 while the messaging app
  requires 320x240.
- Image blobs from ImageCapture.takePhoto() seem to be in PNG
  format. This might confuse some HAL clients (e.g. parsing EXIF
  data fails). The default gallery and camera apps seem to display
  the images fine, though.

Test: manual
      run cts-camera (passed 1668/1732)

Bug: 194396682
Change-Id: Ie4ae5e94faaabde80979ff41974fa445614ea661
Merged-In: Ie4ae5e94faaabde80979ff41974fa445614ea661
Authored by Mikko Koivisto on 2021-05-27 05:02:59 +00:00; committed by Alistair Delva
parent 2eb8d394fb
commit dae9f0b66a
47 changed files with 3301 additions and 30 deletions
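
For orientation before the file diffs, here is a minimal guest-side sketch of the vsock flow described above, written against the VsockConnection helpers this change introduces. It is illustrative only: the port argument, the CID, the JSON field names, and whether the settings arrive on this connection at all are assumptions; the real consumer is the vsock camera HAL added further down.

// Illustrative guest-side sketch (not part of this change): accept the
// streamer's vsock connection and read an initial JSON settings message.
#include "common/libs/utils/vsock_connection.h"

bool ReadCameraSettings(unsigned int port, int* width, int* height,
                        double* frame_rate) {
  cuttlefish::VsockServerConnection connection;
  // Block until the host-side CameraStreamer connects on the camera port.
  if (!connection.Connect(port, VMADDR_CID_ANY)) {
    return false;
  }
  // Messages are length-prefixed; ReadJsonMessage() reads one and parses it.
  Json::Value settings = connection.ReadJsonMessage();
  if (settings.isNull()) {
    return false;
  }
  *width = settings["width"].asInt();               // assumed field name
  *height = settings["height"].asInt();             // assumed field name
  *frame_rate = settings["frame_rate"].asDouble();  // assumed field name
  return true;
}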

View File

@@ -444,7 +444,7 @@ SharedFD SharedFD::SocketLocalServer(const std::string& name, bool abstract,
   return rval;
 }
 
-SharedFD SharedFD::VsockServer(unsigned int port, int type) {
+SharedFD SharedFD::VsockServer(unsigned int port, int type, unsigned int cid) {
   auto vsock = SharedFD::Socket(AF_VSOCK, type, 0);
   if (!vsock->IsOpen()) {
     return vsock;
@@ -452,7 +452,7 @@ SharedFD SharedFD::VsockServer(unsigned int port, int type) {
   sockaddr_vm addr{};
   addr.svm_family = AF_VSOCK;
   addr.svm_port = port;
-  addr.svm_cid = VMADDR_CID_ANY;
+  addr.svm_cid = cid;
   auto casted_addr = reinterpret_cast<sockaddr*>(&addr);
   if (vsock->Bind(casted_addr, sizeof(addr)) == -1) {
     LOG(ERROR) << "Bind failed (" << vsock->StrError() << ")";

View File

@@ -141,7 +141,8 @@ class SharedFD {
   static SharedFD SocketLocalServer(const std::string& name, bool is_abstract,
                                     int in_type, mode_t mode);
   static SharedFD SocketLocalServer(int port, int type);
-  static SharedFD VsockServer(unsigned int port, int type);
+  static SharedFD VsockServer(unsigned int port, int type,
+                              unsigned int cid = VMADDR_CID_ANY);
   static SharedFD VsockServer(int type);
   static SharedFD VsockClient(unsigned int cid, unsigned int port, int type);

View File

@@ -30,18 +30,21 @@ cc_library {
         "base64.cpp",
         "tcp_socket.cpp",
         "tee_logging.cpp",
+        "vsock_connection.cpp",
     ],
     shared: {
         shared_libs: [
             "libbase",
             "libcuttlefish_fs",
             "libcrypto",
+            "libjsoncpp",
         ],
     },
     static: {
         static_libs: [
             "libbase",
             "libcuttlefish_fs",
+            "libjsoncpp",
         ],
         shared_libs: [
             "libcrypto", // libcrypto_static is not accessible from all targets
@@ -49,3 +52,12 @@ cc_library {
     },
     defaults: ["cuttlefish_host"],
 }
+
+cc_library {
+    name: "libvsock_utils",
+    srcs: ["vsock_connection.cpp"],
+    shared_libs: ["libbase", "libcuttlefish_fs", "liblog", "libjsoncpp"],
+    defaults: ["cuttlefish_guest_only"],
+    include_dirs: ["device/google/cuttlefish"],
+    export_include_dirs: ["."],
+}

View File

@@ -0,0 +1,210 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/libs/utils/vsock_connection.h"
#include "common/libs/fs/shared_buf.h"
#include "common/libs/fs/shared_select.h"
#include "android-base/logging.h"
namespace cuttlefish {
VsockConnection::~VsockConnection() { Disconnect(); }
std::future<bool> VsockConnection::ConnectAsync(unsigned int port,
unsigned int cid) {
return std::async(std::launch::async,
[this, port, cid]() { return Connect(port, cid); });
}
void VsockConnection::Disconnect() {
LOG(INFO) << "Disconnecting with fd status:" << fd_->StrError();
fd_->Shutdown(SHUT_RDWR);
if (disconnect_callback_) {
disconnect_callback_();
}
fd_->Close();
}
void VsockConnection::SetDisconnectCallback(std::function<void()> callback) {
disconnect_callback_ = callback;
}
bool VsockConnection::IsConnected() const { return fd_->IsOpen(); }
bool VsockConnection::DataAvailable() const {
SharedFDSet read_set;
read_set.Set(fd_);
struct timeval timeout = {0, 0};
return Select(&read_set, nullptr, nullptr, &timeout) > 0;
}
int32_t VsockConnection::Read() {
std::lock_guard<std::recursive_mutex> lock(read_mutex_);
int32_t result;
if (ReadExactBinary(fd_, &result) != sizeof(result)) {
Disconnect();
return 0;
}
return result;
}
bool VsockConnection::Read(std::vector<char>& data) {
std::lock_guard<std::recursive_mutex> lock(read_mutex_);
return ReadExact(fd_, &data) == data.size();
}
std::vector<char> VsockConnection::Read(size_t size) {
if (size == 0) {
return {};
}
std::lock_guard<std::recursive_mutex> lock(read_mutex_);
std::vector<char> result(size);
if (ReadExact(fd_, &result) != size) {
Disconnect();
return {};
}
return result;
}
std::future<std::vector<char>> VsockConnection::ReadAsync(size_t size) {
return std::async(std::launch::async, [this, size]() { return Read(size); });
}
// Message format is buffer size followed by buffer data
std::vector<char> VsockConnection::ReadMessage() {
std::lock_guard<std::recursive_mutex> lock(read_mutex_);
auto size = Read();
if (size < 0) {
Disconnect();
return {};
}
return Read(size);
}
bool VsockConnection::ReadMessage(std::vector<char>& data) {
std::lock_guard<std::recursive_mutex> lock(read_mutex_);
auto size = Read();
if (size < 0) {
Disconnect();
return false;
}
data.resize(size);
return Read(data);
}
std::future<std::vector<char>> VsockConnection::ReadMessageAsync() {
return std::async(std::launch::async, [this]() { return ReadMessage(); });
}
Json::Value VsockConnection::ReadJsonMessage() {
auto msg = ReadMessage();
Json::CharReaderBuilder builder;
Json::CharReader* reader = builder.newCharReader();
Json::Value json_msg;
std::string errors;
if (!reader->parse(msg.data(), msg.data() + msg.size(), &json_msg, &errors)) {
return {};
}
return json_msg;
}
std::future<Json::Value> VsockConnection::ReadJsonMessageAsync() {
return std::async(std::launch::async, [this]() { return ReadJsonMessage(); });
}
bool VsockConnection::Write(int32_t data) {
std::lock_guard<std::recursive_mutex> lock(write_mutex_);
if (WriteAllBinary(fd_, &data) != sizeof(data)) {
Disconnect();
return false;
}
return true;
}
bool VsockConnection::Write(const char* data, unsigned int size) {
std::lock_guard<std::recursive_mutex> lock(write_mutex_);
if (WriteAll(fd_, data, size) != size) {
Disconnect();
return false;
}
return true;
}
bool VsockConnection::Write(const std::vector<char>& data) {
return Write(data.data(), data.size());
}
// Message format is buffer size followed by buffer data
bool VsockConnection::WriteMessage(const std::string& data) {
return Write(data.size()) && Write(data.c_str(), data.length());
}
bool VsockConnection::WriteMessage(const std::vector<char>& data) {
std::lock_guard<std::recursive_mutex> lock(write_mutex_);
return Write(data.size()) && Write(data);
}
bool VsockConnection::WriteMessage(const Json::Value& data) {
Json::StreamWriterBuilder factory;
std::string message_str = Json::writeString(factory, data);
return WriteMessage(message_str);
}
bool VsockConnection::WriteStrides(const char* data, unsigned int size,
unsigned int num_strides, int stride_size) {
const char* src = data;
for (unsigned int i = 0; i < num_strides; ++i, src += stride_size) {
if (!Write(src, size)) {
return false;
}
}
return true;
}
bool VsockClientConnection::Connect(unsigned int port, unsigned int cid) {
fd_ = SharedFD::VsockClient(cid, port, SOCK_STREAM);
if (!fd_->IsOpen()) {
LOG(ERROR) << "Failed to connect:" << fd_->StrError();
}
return fd_->IsOpen();
}
VsockServerConnection::~VsockServerConnection() { ServerShutdown(); }
void VsockServerConnection::ServerShutdown() {
if (server_fd_->IsOpen()) {
LOG(INFO) << __FUNCTION__
<< ": server fd status:" << server_fd_->StrError();
server_fd_->Shutdown(SHUT_RDWR);
server_fd_->Close();
}
}
bool VsockServerConnection::Connect(unsigned int port, unsigned int cid) {
if (!server_fd_->IsOpen()) {
server_fd_ = cuttlefish::SharedFD::VsockServer(port, SOCK_STREAM, cid);
}
if (server_fd_->IsOpen()) {
fd_ = SharedFD::Accept(*server_fd_);
return fd_->IsOpen();
} else {
return false;
}
}
} // namespace cuttlefish
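
The Write*/Read* pairs above frame each message as a 32-bit size followed by the payload. WriteStrides is the least obvious helper: it writes num_strides chunks of size bytes, advancing the source pointer by stride_size between chunks, so a caller can send an image plane whose rows are padded to a wider stride. A hedged sketch of such a caller (the function name and parameters are illustrative, not part of this change):

// Illustrative only: send the visible bytes of one padded image plane.
// `plane` points to `height` rows that start every `stride` bytes in memory
// but carry only `width` meaningful bytes each.
#include "common/libs/utils/vsock_connection.h"

bool SendPaddedPlane(cuttlefish::VsockConnection& connection,
                     const char* plane, unsigned int width,
                     unsigned int height, int stride) {
  return connection.WriteStrides(plane, /*size=*/width,
                                 /*num_strides=*/height,
                                 /*stride_size=*/stride);
}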

View File

@@ -0,0 +1,80 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <json/json.h>
#include <functional>
#include <future>
#include <mutex>
#include <vector>
#include "common/libs/fs/shared_fd.h"
namespace cuttlefish {
class VsockConnection {
public:
virtual ~VsockConnection();
virtual bool Connect(unsigned int port, unsigned int cid) = 0;
virtual void Disconnect();
std::future<bool> ConnectAsync(unsigned int port, unsigned int cid);
void SetDisconnectCallback(std::function<void()> callback);
bool IsConnected() const;
bool DataAvailable() const;
int32_t Read();
bool Read(std::vector<char>& data);
std::vector<char> Read(size_t size);
std::future<std::vector<char>> ReadAsync(size_t size);
bool ReadMessage(std::vector<char>& data);
std::vector<char> ReadMessage();
std::future<std::vector<char>> ReadMessageAsync();
Json::Value ReadJsonMessage();
std::future<Json::Value> ReadJsonMessageAsync();
bool Write(int32_t data);
bool Write(const char* data, unsigned int size);
bool Write(const std::vector<char>& data);
bool WriteMessage(const std::string& data);
bool WriteMessage(const std::vector<char>& data);
bool WriteMessage(const Json::Value& data);
bool WriteStrides(const char* data, unsigned int size,
unsigned int num_strides, int stride_size);
protected:
std::recursive_mutex read_mutex_;
std::recursive_mutex write_mutex_;
std::function<void()> disconnect_callback_;
SharedFD fd_;
};
class VsockClientConnection : public VsockConnection {
public:
bool Connect(unsigned int port, unsigned int cid) override;
};
class VsockServerConnection : public VsockConnection {
public:
virtual ~VsockServerConnection();
void ServerShutdown();
bool Connect(unsigned int port, unsigned int cid) override;
private:
SharedFD server_fd_;
};
} // namespace cuttlefish

View File

@@ -0,0 +1,84 @@
package {
default_applicable_licenses: ["Android-Apache-2.0"],
}
cc_binary {
name: "android.hardware.camera.provider@2.7-external-vsock-service",
defaults: ["hidl_defaults"],
proprietary: true,
relative_install_path: "hw",
srcs: ["external-service.cpp"],
compile_multilib: "first",
init_rc: ["android.hardware.camera.provider@2.7-external-vsock-service.rc"],
shared_libs: [
"android.hardware.camera.provider@2.7",
"libbinder",
"libhidlbase",
"liblog",
"libutils",
],
}
cc_library_shared {
name: "android.hardware.camera.provider@2.7-impl-cuttlefish",
defaults: ["hidl_defaults"],
proprietary: true,
relative_install_path: "hw",
srcs: [
"vsock_camera_provider_2_7.cpp",
"vsock_camera_device_3_4.cpp",
"vsock_camera_device_session_3_4.cpp",
"vsock_camera_metadata.cpp",
"vsock_camera_server.cpp",
"vsock_frame_provider.cpp",
"cached_stream_buffer.cpp",
"stream_buffer_cache.cpp",
],
shared_libs: [
"android.hardware.camera.common@1.0",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
"android.hardware.camera.provider@2.4-external",
"android.hardware.camera.provider@2.4-legacy",
"android.hardware.graphics.mapper@2.0",
"android.hardware.graphics.mapper@3.0",
"android.hardware.graphics.mapper@4.0",
"android.hidl.allocator@1.0",
"android.hidl.memory@1.0",
"camera.device@1.0-impl",
"camera.device@3.2-impl",
"camera.device@3.3-impl",
"camera.device@3.4-impl",
"libcamera_metadata",
"libcutils",
"libhardware",
"libhidlbase",
"liblog",
"libutils",
"libvsock_utils",
"libcuttlefish_fs",
"libjsoncpp",
"libyuv",
"libsync",
"libfmq",
"libgralloctypes",
],
header_libs: [
"camera.device@3.4-external-impl_headers",
"camera.device@3.4-impl_headers",
"camera.device@3.5-external-impl_headers",
"camera.device@3.5-impl_headers",
],
static_libs: [
"android.hardware.camera.common@1.0-helper",
],
include_dirs: ["device/google/cuttlefish"],
export_include_dirs: ["."],
}

View File

@@ -0,0 +1,11 @@
service vendor.camera-provider-2-7-ext /vendor/bin/hw/android.hardware.camera.provider@2.7-external-vsock-service
interface android.hardware.camera.provider@2.4::ICameraProvider external/0
interface android.hardware.camera.provider@2.5::ICameraProvider external/0
interface android.hardware.camera.provider@2.6::ICameraProvider external/0
interface android.hardware.camera.provider@2.7::ICameraProvider external/0
class hal
user cameraserver
group audio camera input drmrpc
ioprio rt 4
capabilities SYS_NICE
task_profiles CameraServiceCapacity MaxPerformance

View File

@@ -0,0 +1,117 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "CachedStreamBuffer"
#include "cached_stream_buffer.h"
#include <hardware/gralloc.h>
#include <log/log.h>
#include <sync/sync.h>
namespace android::hardware::camera::device::V3_4::implementation {
namespace {
HandleImporter g_importer;
}
ReleaseFence::ReleaseFence(int fence_fd) : handle_(nullptr) {
if (fence_fd >= 0) {
handle_ = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
handle_->data[0] = fence_fd;
}
}
ReleaseFence::~ReleaseFence() {
if (handle_ != nullptr) {
native_handle_close(handle_);
native_handle_delete(handle_);
}
}
CachedStreamBuffer::CachedStreamBuffer()
: buffer_(nullptr), buffer_id_(0), stream_id_(0), acquire_fence_(-1) {}
CachedStreamBuffer::CachedStreamBuffer(const StreamBuffer& buffer)
: buffer_(buffer.buffer.getNativeHandle()),
buffer_id_(buffer.bufferId),
stream_id_(buffer.streamId),
acquire_fence_(-1) {
g_importer.importBuffer(buffer_);
g_importer.importFence(buffer.acquireFence, acquire_fence_);
}
CachedStreamBuffer::CachedStreamBuffer(CachedStreamBuffer&& from) noexcept {
buffer_ = from.buffer_;
buffer_id_ = from.buffer_id_;
stream_id_ = from.stream_id_;
acquire_fence_ = from.acquire_fence_;
from.acquire_fence_ = -1;
from.buffer_ = nullptr;
}
CachedStreamBuffer& CachedStreamBuffer::operator=(
CachedStreamBuffer&& from) noexcept {
if (this != &from) {
buffer_ = from.buffer_;
buffer_id_ = from.buffer_id_;
stream_id_ = from.stream_id_;
acquire_fence_ = from.acquire_fence_;
from.acquire_fence_ = -1;
from.buffer_ = nullptr;
}
return *this;
}
CachedStreamBuffer::~CachedStreamBuffer() {
if (buffer_ != nullptr) {
g_importer.freeBuffer(buffer_);
}
g_importer.closeFence(acquire_fence_);
}
void CachedStreamBuffer::importFence(const native_handle_t* fence_handle) {
g_importer.closeFence(acquire_fence_);
g_importer.importFence(fence_handle, acquire_fence_);
}
YCbCrLayout CachedStreamBuffer::acquireAsYUV(int32_t width, int32_t height,
int timeout_ms) {
if (acquire_fence_ >= 0) {
if (sync_wait(acquire_fence_, timeout_ms)) {
ALOGW("%s: timeout while waiting acquire fence", __FUNCTION__);
return {};
} else {
::close(acquire_fence_);
acquire_fence_ = -1;
}
}
IMapper::Rect region{0, 0, width, height};
return g_importer.lockYCbCr(buffer_, GRALLOC_USAGE_SW_WRITE_OFTEN, region);
}
void* CachedStreamBuffer::acquireAsBlob(int32_t size, int timeout_ms) {
if (acquire_fence_ >= 0) {
if (sync_wait(acquire_fence_, timeout_ms)) {
return nullptr;
} else {
::close(acquire_fence_);
acquire_fence_ = -1;
}
}
return g_importer.lock(buffer_, GRALLOC_USAGE_SW_WRITE_OFTEN, size);
}
int CachedStreamBuffer::release() { return g_importer.unlock(buffer_); }
} // namespace android::hardware::camera::device::V3_4::implementation

View File

@@ -0,0 +1,70 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include "HandleImporter.h"
namespace android::hardware::camera::device::V3_4::implementation {
using ::android::hardware::camera::common::V1_0::helper::HandleImporter;
using ::android::hardware::camera::device::V3_2::StreamBuffer;
// Small wrapper for allocating/freeing native handles
class ReleaseFence {
public:
ReleaseFence(int fence_fd);
~ReleaseFence();
native_handle_t* handle() const { return handle_; }
private:
native_handle_t* handle_;
};
// CachedStreamBuffer holds a buffer of camera3 stream.
class CachedStreamBuffer {
public:
CachedStreamBuffer();
CachedStreamBuffer(const StreamBuffer& buffer);
// Not copyable
CachedStreamBuffer(const CachedStreamBuffer&) = delete;
CachedStreamBuffer& operator=(const CachedStreamBuffer&) = delete;
// ...but movable
CachedStreamBuffer(CachedStreamBuffer&& from) noexcept;
CachedStreamBuffer& operator=(CachedStreamBuffer&& from) noexcept;
~CachedStreamBuffer();
bool valid() const { return buffer_ != nullptr; }
uint64_t bufferId() const { return buffer_id_; }
int32_t streamId() const { return stream_id_; }
int acquireFence() const { return acquire_fence_; }
void importFence(const native_handle_t* fence_handle);
// Acquire methods wait first on acquire fence and then return pointers to
// data. Data is nullptr if the wait timed out
YCbCrLayout acquireAsYUV(int32_t width, int32_t height, int timeout_ms);
void* acquireAsBlob(int32_t size, int timeout_ms);
int release();
private:
buffer_handle_t buffer_;
uint64_t buffer_id_;
int32_t stream_id_;
int acquire_fence_;
};
} // namespace android::hardware::camera::device::V3_4::implementation
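
A hedged sketch of the intended call pattern for CachedStreamBuffer, matching how the device session later in this change uses it; the caller, dimensions, and timeout below are assumptions:

// Illustrative only: map a cached YUV output buffer, fill it, release it.
#include "cached_stream_buffer.h"

namespace impl = android::hardware::camera::device::V3_4::implementation;

void FillYuvBuffer(impl::CachedStreamBuffer& buffer, int32_t width,
                   int32_t height) {
  // acquireAsYUV() waits on the acquire fence (here up to 500 ms) and then
  // locks the gralloc buffer for CPU writes; an empty layout means timeout.
  auto layout = buffer.acquireAsYUV(width, height, /*timeout_ms=*/500);
  if (layout.y == nullptr) {
    return;
  }
  // ... write Y/Cb/Cr data through layout.y, layout.cb and layout.cr ...
  buffer.release();  // unlock the gralloc buffer; the handle stays cached
}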

View File

@@ -0,0 +1,34 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "android.hardware.camera.provider@2.7-external-service"
#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
#include <hidl/LegacySupport.h>
#include <binder/ProcessState.h>
using android::hardware::defaultPassthroughServiceImplementation;
using android::hardware::camera::provider::V2_7::ICameraProvider;
int main() {
ALOGI("External camera provider service is starting.");
// The camera HAL may communicate to other vendor components via
// /dev/vndbinder
android::ProcessState::initWithDriver("/dev/vndbinder");
return defaultPassthroughServiceImplementation<ICameraProvider>(
"external/0", /*maxThreads*/ 6);
}

View File

@@ -0,0 +1,26 @@
<!--
     Copyright (C) 2021 The Android Open Source Project

     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
     You may obtain a copy of the License at

          http://www.apache.org/licenses/LICENSE-2.0

     Unless required by applicable law or agreed to in writing, software
     distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     See the License for the specific language governing permissions and
     limitations under the License.
-->
<manifest version="1.0" type="device">
<hal format="hidl">
<name>android.hardware.camera.provider</name>
<transport>hwbinder</transport>
<version>2.7</version>
<interface>
<name>ICameraProvider</name>
<instance>external/0</instance>
</interface>
</hal>
</manifest>

View File

@@ -0,0 +1,70 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "stream_buffer_cache.h"
#include <algorithm>
namespace android::hardware::camera::device::V3_4::implementation {
std::shared_ptr<CachedStreamBuffer> StreamBufferCache::get(uint64_t buffer_id) {
auto id_match =
[buffer_id](const std::shared_ptr<CachedStreamBuffer>& buffer) {
return buffer->bufferId() == buffer_id;
};
std::lock_guard<std::mutex> lock(mutex_);
auto found = std::find_if(cache_.begin(), cache_.end(), id_match);
return (found != cache_.end()) ? *found : nullptr;
}
void StreamBufferCache::remove(uint64_t buffer_id) {
auto id_match =
[&buffer_id](const std::shared_ptr<CachedStreamBuffer>& buffer) {
return buffer->bufferId() == buffer_id;
};
std::lock_guard<std::mutex> lock(mutex_);
// Use the full erase-remove idiom so that an id with no cached buffer is a
// no-op instead of erase(end()).
cache_.erase(std::remove_if(cache_.begin(), cache_.end(), id_match),
cache_.end());
}
void StreamBufferCache::update(const StreamBuffer& buffer) {
auto id = buffer.bufferId;
auto id_match = [id](const std::shared_ptr<CachedStreamBuffer>& buffer) {
return buffer->bufferId() == id;
};
std::lock_guard<std::mutex> lock(mutex_);
auto found = std::find_if(cache_.begin(), cache_.end(), id_match);
if (found == cache_.end()) {
cache_.emplace_back(std::make_shared<CachedStreamBuffer>(buffer));
} else {
(*found)->importFence(buffer.acquireFence);
}
}
void StreamBufferCache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
cache_.clear();
}
void StreamBufferCache::removeStreamsExcept(std::set<int32_t> streams_to_keep) {
std::lock_guard<std::mutex> lock(mutex_);
for (auto it = cache_.begin(); it != cache_.end();) {
if (streams_to_keep.count((*it)->streamId()) == 0) {
it = cache_.erase(it);
} else {
it++;
}
}
}
} // namespace android::hardware::camera::device::V3_4::implementation

View File

@@ -0,0 +1,38 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <mutex>
#include <set>
#include <vector>
#include "cached_stream_buffer.h"
namespace android::hardware::camera::device::V3_4::implementation {
class StreamBufferCache {
public:
std::shared_ptr<CachedStreamBuffer> get(uint64_t buffer_id);
void remove(uint64_t buffer_id);
void update(const StreamBuffer& buffer);
void clear();
void removeStreamsExcept(std::set<int32_t> streams_to_keep = {});
private:
std::mutex mutex_;
std::vector<std::shared_ptr<CachedStreamBuffer>> cache_;
};
} // namespace android::hardware::camera::device::V3_4::implementation
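
For context, a small illustrative sketch of the per-request pattern this cache supports; the device session later in this change is the real consumer, and the helper below is not part of the change:

// Illustrative only: import/refresh a request's output buffers, then look
// them up by id; ids that were never cached simply return nullptr.
#include <vector>

#include "stream_buffer_cache.h"

namespace impl = android::hardware::camera::device::V3_4::implementation;
using StreamBuffer = android::hardware::camera::device::V3_2::StreamBuffer;

void CacheRequestBuffers(impl::StreamBufferCache& cache,
                         const std::vector<StreamBuffer>& output_buffers) {
  for (const StreamBuffer& sb : output_buffers) {
    cache.update(sb);  // first sight imports the handle and acquire fence
    auto cached = cache.get(sb.bufferId);
    if (cached != nullptr) {
      // ... acquire, fill and release via the CachedStreamBuffer API ...
    }
  }
  // After a stream reconfiguration the session calls removeStreamsExcept()
  // to drop cached buffers that belong to deleted streams.
}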

View File

@@ -0,0 +1,106 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "VsockCameraDevice"
//#define LOG_NDEBUG 0
#include <log/log.h>
#include <algorithm>
#include <array>
#include "CameraMetadata.h"
#include "android-base/macros.h"
#include "include/convert.h"
#include "vsock_camera_device_3_4.h"
#include "vsock_camera_device_session_3_4.h"
namespace android::hardware::camera::device::V3_4::implementation {
VsockCameraDevice::VsockCameraDevice(
const std::string& id, const Settings& settings,
std::shared_ptr<cuttlefish::VsockConnection> connection)
: id_(id),
metadata_(settings.width, settings.height, settings.frame_rate),
connection_(connection),
is_open_(false) {
ALOGI("%s", __FUNCTION__);
}
VsockCameraDevice::~VsockCameraDevice() { ALOGI("%s", __FUNCTION__); }
Return<void> VsockCameraDevice::getResourceCost(
ICameraDevice::getResourceCost_cb _hidl_cb) {
CameraResourceCost resCost;
resCost.resourceCost = 100;
_hidl_cb(Status::OK, resCost);
return Void();
}
Return<void> VsockCameraDevice::getCameraCharacteristics(
ICameraDevice::getCameraCharacteristics_cb _hidl_cb) {
V3_2::CameraMetadata hidl_vec;
const camera_metadata_t* metadata_ptr = metadata_.getAndLock();
V3_2::implementation::convertToHidl(metadata_ptr, &hidl_vec);
_hidl_cb(Status::OK, hidl_vec);
metadata_.unlock(metadata_ptr);
return Void();
}
Return<Status> VsockCameraDevice::setTorchMode(TorchMode) {
return Status::OPERATION_NOT_SUPPORTED;
}
Return<void> VsockCameraDevice::open(const sp<ICameraDeviceCallback>& callback,
ICameraDevice::open_cb _hidl_cb) {
if (callback == nullptr) {
ALOGE("%s: cannot open camera %s. callback is null!", __FUNCTION__,
id_.c_str());
_hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
return Void();
}
bool was_open = is_open_.exchange(true);
if (was_open) {
ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
_hidl_cb(Status::CAMERA_IN_USE, nullptr);
return Void();
}
ALOGI("%s: Initializing device for camera %s", __FUNCTION__, id_.c_str());
frame_provider_ = std::make_shared<cuttlefish::VsockFrameProvider>();
frame_provider_->start(connection_, metadata_.getPreferredWidth(),
metadata_.getPreferredHeight());
session_ = new VsockCameraDeviceSession(metadata_, frame_provider_, callback);
_hidl_cb(Status::OK, session_);
return Void();
}
Return<void> VsockCameraDevice::dumpState(
const ::android::hardware::hidl_handle& handle) {
if (handle.getNativeHandle() == nullptr) {
ALOGE("%s: handle must not be null", __FUNCTION__);
return Void();
}
if (handle->numFds != 1 || handle->numInts != 0) {
ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints",
__FUNCTION__, handle->numFds, handle->numInts);
return Void();
}
int fd = handle->data[0];
dprintf(fd, "Camera:%s\n", id_.c_str());
return Void();
}
} // namespace android::hardware::camera::device::V3_4::implementation

View File

@@ -0,0 +1,76 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "CameraMetadata.h"
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <hidl/MQDescriptor.h>
#include <hidl/Status.h>
#include "vsock_camera_device_session_3_4.h"
#include "vsock_camera_metadata.h"
#include "vsock_connection.h"
#include "vsock_frame_provider.h"
#include <vector>
namespace android::hardware::camera::device::V3_4::implementation {
using namespace ::android::hardware::camera::device;
using ::android::sp;
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::camera::common::V1_0::CameraResourceCost;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::common::V1_0::TorchMode;
using ::android::hardware::camera::device::V3_2::ICameraDevice;
using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback;
class VsockCameraDevice : public ICameraDevice {
public:
using Settings = struct {
int32_t width;
int32_t height;
double frame_rate;
};
VsockCameraDevice(const std::string& id, const Settings& settings,
std::shared_ptr<cuttlefish::VsockConnection> connection);
virtual ~VsockCameraDevice();
/* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice
* follow. */
Return<void> getResourceCost(ICameraDevice::getResourceCost_cb _hidl_cb);
Return<void> getCameraCharacteristics(
ICameraDevice::getCameraCharacteristics_cb _hidl_cb);
Return<Status> setTorchMode(TorchMode);
Return<void> open(const sp<ICameraDeviceCallback>&, ICameraDevice::open_cb);
Return<void> dumpState(const ::android::hardware::hidl_handle&);
/* End of Methods from
* ::android::hardware::camera::device::V3_2::ICameraDevice */
private:
std::string id_;
VsockCameraMetadata metadata_;
std::shared_ptr<cuttlefish::VsockConnection> connection_;
std::shared_ptr<cuttlefish::VsockFrameProvider> frame_provider_;
std::atomic<bool> is_open_;
sp<VsockCameraDeviceSession> session_;
};
} // namespace android::hardware::camera::device::V3_4::implementation
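
A hedged sketch of how the provider side (added elsewhere in this change) might construct one of these devices once a streamer connection and its settings are known; the camera id and the helper itself are assumptions for illustration:

// Illustrative only: build a VsockCameraDevice from already-parsed settings.
#include "vsock_camera_device_3_4.h"

using android::hardware::camera::device::V3_4::implementation::VsockCameraDevice;

android::sp<VsockCameraDevice> MakeVsockCamera(
    std::shared_ptr<cuttlefish::VsockConnection> connection, int32_t width,
    int32_t height, double frame_rate) {
  VsockCameraDevice::Settings settings{width, height, frame_rate};
  // "0" is a placeholder camera id; the real id comes from the provider.
  return new VsockCameraDevice("0", settings, connection);
}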

View File

@@ -0,0 +1,574 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "VsockCameraDeviceSession"
#include "vsock_camera_device_session_3_4.h"
#include <hidl/Status.h>
#include <include/convert.h>
#include <inttypes.h>
#include <libyuv.h>
#include <log/log.h>
#include "vsock_camera_metadata.h"
// Partially copied from ExternalCameraDeviceSession
namespace android::hardware::camera::device::V3_4::implementation {
VsockCameraDeviceSession::VsockCameraDeviceSession(
VsockCameraMetadata camera_characteristics,
std::shared_ptr<cuttlefish::VsockFrameProvider> frame_provider,
const sp<ICameraDeviceCallback>& callback)
: camera_characteristics_(camera_characteristics),
frame_provider_(frame_provider),
callback_(callback) {
static constexpr size_t kMsgQueueSize = 256 * 1024;
request_queue_ =
std::make_unique<MessageQueue<uint8_t, kSynchronizedReadWrite>>(
kMsgQueueSize, false);
result_queue_ =
std::make_shared<MessageQueue<uint8_t, kSynchronizedReadWrite>>(
kMsgQueueSize, false);
unsigned int timeout_ms = 1000 / camera_characteristics.getPreferredFps();
process_requests_ = true;
request_processor_ =
std::thread([this, timeout_ms] { processRequestLoop(timeout_ms); });
}
VsockCameraDeviceSession::~VsockCameraDeviceSession() { close(); }
Return<void> VsockCameraDeviceSession::constructDefaultRequestSettings(
V3_2::RequestTemplate type,
V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
auto frame_rate = camera_characteristics_.getPreferredFps();
auto metadata = VsockCameraRequestMetadata(frame_rate, type);
V3_2::CameraMetadata hidl_metadata;
Status status = metadata.isValid() ? Status::OK : Status::ILLEGAL_ARGUMENT;
if (metadata.isValid()) {
camera_metadata_t* metadata_ptr = metadata.release();
hidl_metadata.setToExternal((uint8_t*)metadata_ptr,
get_camera_metadata_size(metadata_ptr));
}
_hidl_cb(status, hidl_metadata);
return Void();
}
Return<void> VsockCameraDeviceSession::getCaptureRequestMetadataQueue(
ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) {
_hidl_cb(*request_queue_->getDesc());
return Void();
}
Return<void> VsockCameraDeviceSession::getCaptureResultMetadataQueue(
ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) {
_hidl_cb(*result_queue_->getDesc());
return Void();
}
Return<void> VsockCameraDeviceSession::configureStreams(
const V3_2::StreamConfiguration& streams,
ICameraDeviceSession::configureStreams_cb _hidl_cb) {
// common configureStreams operate with v3_2 config and v3_3
// streams so we need to "downcast" v3_3 streams to v3_2 streams
V3_2::HalStreamConfiguration out_v32;
V3_3::HalStreamConfiguration out_v33;
Status status = configureStreams(streams, &out_v33);
size_t size = out_v33.streams.size();
out_v32.streams.resize(size);
for (size_t i = 0; i < size; i++) {
out_v32.streams[i] = out_v33.streams[i].v3_2;
}
_hidl_cb(status, out_v32);
return Void();
}
Return<void> VsockCameraDeviceSession::configureStreams_3_3(
const V3_2::StreamConfiguration& streams,
ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) {
V3_3::HalStreamConfiguration out_v33;
Status status = configureStreams(streams, &out_v33);
_hidl_cb(status, out_v33);
return Void();
}
Return<void> VsockCameraDeviceSession::configureStreams_3_4(
const V3_4::StreamConfiguration& requestedConfiguration,
ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) {
// common configureStreams operate with v3_2 config and v3_3
// streams so we need to "downcast" v3_4 config to v3_2 and
// "upcast" v3_3 streams to v3_4 streams
V3_2::StreamConfiguration config_v32;
V3_3::HalStreamConfiguration out_v33;
V3_4::HalStreamConfiguration out_v34;
config_v32.operationMode = requestedConfiguration.operationMode;
config_v32.streams.resize(requestedConfiguration.streams.size());
for (size_t i = 0; i < config_v32.streams.size(); i++) {
config_v32.streams[i] = requestedConfiguration.streams[i].v3_2;
}
max_blob_size_ = getBlobSize(requestedConfiguration);
Status status = configureStreams(config_v32, &out_v33);
out_v34.streams.resize(out_v33.streams.size());
for (size_t i = 0; i < out_v34.streams.size(); i++) {
out_v34.streams[i].v3_3 = out_v33.streams[i];
}
_hidl_cb(status, out_v34);
return Void();
}
Return<void> VsockCameraDeviceSession::processCaptureRequest(
const hidl_vec<CaptureRequest>& requests,
const hidl_vec<BufferCache>& cachesToRemove,
ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) {
updateBufferCaches(cachesToRemove);
uint32_t count;
Status s = Status::OK;
for (count = 0; count < requests.size(); count++) {
s = processOneCaptureRequest(requests[count]);
if (s != Status::OK) {
break;
}
}
_hidl_cb(s, count);
return Void();
}
Return<void> VsockCameraDeviceSession::processCaptureRequest_3_4(
const hidl_vec<V3_4::CaptureRequest>& requests,
const hidl_vec<V3_2::BufferCache>& cachesToRemove,
ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) {
updateBufferCaches(cachesToRemove);
uint32_t count;
Status s = Status::OK;
for (count = 0; count < requests.size(); count++) {
s = processOneCaptureRequest(requests[count].v3_2);
if (s != Status::OK) {
break;
}
}
_hidl_cb(s, count);
return Void();
}
Return<Status> VsockCameraDeviceSession::flush() {
auto timeout = std::chrono::seconds(1);
std::unique_lock<std::mutex> lock(request_mutex_);
flushing_requests_ = true;
auto is_empty = [this] { return pending_requests_.empty(); };
if (!queue_empty_.wait_for(lock, timeout, is_empty)) {
ALOGE("Flush timeout - %zu pending requests", pending_requests_.size());
}
flushing_requests_ = false;
return Status::OK;
}
Return<void> VsockCameraDeviceSession::close() {
process_requests_ = false;
if (request_processor_.joinable()) {
request_processor_.join();
}
frame_provider_->stop();
buffer_cache_.clear();
ALOGI("%s", __FUNCTION__);
return Void();
}
using ::android::hardware::graphics::common::V1_0::BufferUsage;
using ::android::hardware::graphics::common::V1_0::PixelFormat;
Status VsockCameraDeviceSession::configureStreams(
const V3_2::StreamConfiguration& config,
V3_3::HalStreamConfiguration* out) {
Status status = isStreamConfigurationSupported(config);
if (status != Status::OK) {
return status;
}
updateStreamInfo(config);
out->streams.resize(config.streams.size());
for (size_t i = 0; i < config.streams.size(); i++) {
out->streams[i].overrideDataSpace = config.streams[i].dataSpace;
out->streams[i].v3_2.id = config.streams[i].id;
out->streams[i].v3_2.producerUsage =
config.streams[i].usage | BufferUsage::CPU_WRITE_OFTEN;
out->streams[i].v3_2.consumerUsage = 0;
out->streams[i].v3_2.maxBuffers = 2;
out->streams[i].v3_2.overrideFormat =
config.streams[i].format == PixelFormat::IMPLEMENTATION_DEFINED
? PixelFormat::YCBCR_420_888
: config.streams[i].format;
}
return Status::OK;
}
using ::android::hardware::camera::device::V3_2::StreamRotation;
using ::android::hardware::camera::device::V3_2::StreamType;
Status VsockCameraDeviceSession::isStreamConfigurationSupported(
const V3_2::StreamConfiguration& config) {
camera_metadata_entry device_supported_streams = camera_characteristics_.find(
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
int32_t stall_stream_count = 0;
int32_t stream_count = 0;
for (const auto& stream : config.streams) {
if (stream.rotation != StreamRotation::ROTATION_0) {
ALOGE("Unsupported rotation enum value %d", stream.rotation);
return Status::ILLEGAL_ARGUMENT;
}
if (stream.streamType == StreamType::INPUT) {
ALOGE("Input stream not supported");
return Status::ILLEGAL_ARGUMENT;
}
bool is_supported = false;
// check pixel format and dimensions against camera metadata
for (int i = 0; i + 4 <= device_supported_streams.count; i += 4) {
auto format =
static_cast<PixelFormat>(device_supported_streams.data.i32[i]);
int32_t width = device_supported_streams.data.i32[i + 1];
int32_t height = device_supported_streams.data.i32[i + 2];
if (stream.format == format && stream.width == width &&
stream.height == height) {
is_supported = true;
break;
}
}
if (!is_supported) {
ALOGE("Unsupported format %d (%dx%d)", stream.format, stream.width,
stream.height);
return Status::ILLEGAL_ARGUMENT;
}
if (stream.format == PixelFormat::BLOB) {
stall_stream_count++;
} else {
stream_count++;
}
}
camera_metadata_entry device_stream_counts =
camera_characteristics_.find(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS);
static constexpr auto stream_index = 1;
auto expected_stream_count = device_stream_counts.count > stream_index
? device_stream_counts.data.i32[stream_index]
: 0;
if (stream_count > expected_stream_count) {
ALOGE("Too many processed streams (expect <= %d, got %d)",
expected_stream_count, stream_count);
return Status::ILLEGAL_ARGUMENT;
}
static constexpr auto stall_stream_index = 2;
expected_stream_count =
device_stream_counts.count > stall_stream_index
? device_stream_counts.data.i32[stall_stream_index]
: 0;
if (stall_stream_count > expected_stream_count) {
ALOGE("Too many stall streams (expect <= %d, got %d)",
expected_stream_count, stall_stream_count);
return Status::ILLEGAL_ARGUMENT;
}
return Status::OK;
}
unsigned int VsockCameraDeviceSession::getBlobSize(
const V3_4::StreamConfiguration& requestedConfiguration) {
camera_metadata_entry jpeg_entry =
camera_characteristics_.find(ANDROID_JPEG_MAX_SIZE);
unsigned int blob_size = jpeg_entry.count > 0 ? jpeg_entry.data.i32[0] : 0;
for (auto& stream : requestedConfiguration.streams) {
if (stream.v3_2.format == PixelFormat::BLOB) {
if (stream.bufferSize < blob_size) {
blob_size = stream.bufferSize;
}
}
}
return blob_size;
}
void VsockCameraDeviceSession::updateBufferCaches(
const hidl_vec<BufferCache>& to_remove) {
for (auto& cache : to_remove) {
buffer_cache_.remove(cache.bufferId);
}
}
void VsockCameraDeviceSession::updateStreamInfo(
const V3_2::StreamConfiguration& config) {
std::set<int32_t> stream_ids;
for (const auto& stream : config.streams) {
stream_cache_[stream.id] = stream;
stream_ids.emplace(stream.id);
}
buffer_cache_.removeStreamsExcept(stream_ids);
}
Status VsockCameraDeviceSession::processOneCaptureRequest(
const CaptureRequest& request) {
const camera_metadata_t* request_settings = nullptr;
V3_2::CameraMetadata hidl_settings;
if (request.fmqSettingsSize > 0) {
if (!getRequestSettingsFmq(request.fmqSettingsSize, hidl_settings)) {
ALOGE("%s: Could not read capture request settings from fmq!",
__FUNCTION__);
return Status::ILLEGAL_ARGUMENT;
} else if (!V3_2::implementation::convertFromHidl(hidl_settings,
&request_settings)) {
ALOGE("%s: fmq request settings metadata is corrupt!", __FUNCTION__);
return Status::ILLEGAL_ARGUMENT;
}
} else if (!V3_2::implementation::convertFromHidl(request.settings,
&request_settings)) {
ALOGE("%s: request settings metadata is corrupt!", __FUNCTION__);
return Status::ILLEGAL_ARGUMENT;
}
if (request_settings != nullptr) {
// Update request settings. This must happen on first request
std::lock_guard<std::mutex> lock(settings_mutex_);
latest_request_settings_ = request_settings;
} else if (latest_request_settings_.isEmpty()) {
ALOGE("%s: Undefined capture request settings!", __FUNCTION__);
return Status::ILLEGAL_ARGUMENT;
}
std::vector<uint64_t> buffer_ids;
for (size_t i = 0; i < request.outputBuffers.size(); i++) {
buffer_cache_.update(request.outputBuffers[i]);
buffer_ids.emplace_back(request.outputBuffers[i].bufferId);
}
std::lock_guard<std::mutex> lock(settings_mutex_);
ReadVsockRequest request_to_process = {
.buffer_ids = buffer_ids,
.frame_number = request.frameNumber,
.timestamp = 0,
.settings = latest_request_settings_,
.buffer_count = static_cast<uint32_t>(buffer_ids.size())};
putRequestToQueue(request_to_process);
return Status::OK;
}
bool VsockCameraDeviceSession::getRequestSettingsFmq(
uint64_t size, V3_2::CameraMetadata& hidl_settings) {
hidl_settings.resize(size);
return request_queue_->read(hidl_settings.data(), size);
}
bool VsockCameraDeviceSession::getRequestFromQueue(ReadVsockRequest& req,
unsigned int timeout_ms) {
auto timeout = std::chrono::milliseconds(timeout_ms);
std::unique_lock<std::mutex> lock(request_mutex_);
auto not_empty = [this] { return !pending_requests_.empty(); };
if (request_available_.wait_for(lock, timeout, not_empty)) {
req = pending_requests_.top();
pending_requests_.pop();
return true;
}
queue_empty_.notify_one();
return false;
}
void VsockCameraDeviceSession::putRequestToQueue(
const ReadVsockRequest& request) {
std::lock_guard<std::mutex> lock(request_mutex_);
pending_requests_.push(request);
request_available_.notify_one();
}
void VsockCameraDeviceSession::fillCaptureResult(
common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp) {
const uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
md.update(ANDROID_CONTROL_AF_STATE, &af_state, 1);
const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
md.update(ANDROID_CONTROL_AE_STATE, &aeState, 1);
const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
md.update(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
md.update(ANDROID_CONTROL_AWB_STATE, &awbState, 1);
const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
md.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
md.update(ANDROID_FLASH_STATE, &flashState, 1);
const uint8_t requestPipelineMaxDepth = 4;
md.update(ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1);
camera_metadata_entry active_array_size =
camera_characteristics_.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
md.update(ANDROID_SCALER_CROP_REGION, active_array_size.data.i32, 4);
md.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
const uint8_t lensShadingMapMode =
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
md.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
md.update(ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);
}
using ::android::hardware::camera::device::V3_2::MsgType;
using ::android::hardware::camera::device::V3_2::NotifyMsg;
void VsockCameraDeviceSession::notifyShutter(uint32_t frame_number,
nsecs_t timestamp) {
NotifyMsg msg;
msg.type = MsgType::SHUTTER;
msg.msg.shutter.frameNumber = frame_number;
msg.msg.shutter.timestamp = timestamp;
callback_->notify({msg});
}
void VsockCameraDeviceSession::notifyError(uint32_t frame_number,
int32_t stream_id, ErrorCode code) {
NotifyMsg msg;
msg.type = MsgType::ERROR;
msg.msg.error.frameNumber = frame_number;
msg.msg.error.errorStreamId = stream_id;
msg.msg.error.errorCode = code;
callback_->notify({msg});
}
void VsockCameraDeviceSession::tryWriteFmqResult(V3_2::CaptureResult& result) {
result.fmqResultSize = 0;
if (result_queue_->availableToWrite() == 0 || result.result.size() == 0) {
return;
}
if (result_queue_->write(result.result.data(), result.result.size())) {
result.fmqResultSize = result.result.size();
result.result.resize(0);
}
}
using ::android::hardware::camera::device::V3_2::BufferStatus;
using ::android::hardware::graphics::common::V1_0::PixelFormat;
void VsockCameraDeviceSession::processRequestLoop(
unsigned int wait_timeout_ms) {
while (process_requests_.load()) {
ReadVsockRequest request;
if (!getRequestFromQueue(request, wait_timeout_ms)) {
continue;
}
if (!frame_provider_->isRunning()) {
notifyError(request.frame_number, -1, ErrorCode::ERROR_DEVICE);
break;
}
frame_provider_->waitYUVFrame(wait_timeout_ms);
nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
if (request.timestamp == 0) {
request.timestamp = now;
notifyShutter(request.frame_number, request.timestamp);
}
std::vector<ReleaseFence> release_fences;
std::vector<StreamBuffer> result_buffers;
std::vector<uint64_t> pending_buffers;
bool request_ok = true;
for (auto buffer_id : request.buffer_ids) {
auto buffer = buffer_cache_.get(buffer_id);
auto stream_id = buffer ? buffer->streamId() : -1;
if (!buffer || stream_cache_.count(stream_id) == 0) {
ALOGE("%s: Invalid buffer", __FUNCTION__);
notifyError(request.frame_number, -1, ErrorCode::ERROR_REQUEST);
request_ok = false;
break;
}
bool has_result = false;
auto stream = stream_cache_[stream_id];
if (flushing_requests_.load()) {
has_result = false;
release_fences.emplace_back(buffer->acquireFence());
} else if (stream.format == PixelFormat::YCBCR_420_888 ||
stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
auto dst_yuv =
buffer->acquireAsYUV(stream.width, stream.height, wait_timeout_ms);
has_result =
frame_provider_->copyYUVFrame(stream.width, stream.height, dst_yuv);
release_fences.emplace_back(buffer->release());
} else if (stream.format == PixelFormat::BLOB) {
auto time_elapsed = now - request.timestamp;
if (time_elapsed == 0) {
frame_provider_->requestJpeg();
pending_buffers.push_back(buffer_id);
continue;
} else if (frame_provider_->jpegPending()) {
static constexpr auto kMaxWaitNs = 2000000000L;
if (time_elapsed < kMaxWaitNs) {
pending_buffers.push_back(buffer_id);
continue;
}
ALOGE("%s: Blob request timed out after %" PRId64 "ms", __FUNCTION__,
ns2ms(time_elapsed));
frame_provider_->cancelJpegRequest();
has_result = false;
release_fences.emplace_back(buffer->acquireFence());
notifyError(request.frame_number, buffer->streamId(),
ErrorCode::ERROR_BUFFER);
} else {
ALOGI("%s: Blob ready - capture duration=%" PRId64 "ms", __FUNCTION__,
ns2ms(time_elapsed));
auto dst_blob =
buffer->acquireAsBlob(max_blob_size_, wait_timeout_ms);
has_result = frame_provider_->copyJpegData(max_blob_size_, dst_blob);
release_fences.emplace_back(buffer->release());
}
} else {
ALOGE("%s: Format %d not supported", __FUNCTION__, stream.format);
has_result = false;
release_fences.emplace_back(buffer->acquireFence());
notifyError(request.frame_number, buffer->streamId(),
ErrorCode::ERROR_BUFFER);
}
result_buffers.push_back(
{.streamId = buffer->streamId(),
.bufferId = buffer->bufferId(),
.buffer = nullptr,
.status = has_result ? BufferStatus::OK : BufferStatus::ERROR,
.releaseFence = release_fences.back().handle()});
}
if (!request_ok) {
continue;
}
V3_2::CaptureResult result;
bool results_filled = request.settings.exists(ANDROID_SENSOR_TIMESTAMP);
if (!results_filled) {
fillCaptureResult(request.settings, request.timestamp);
const camera_metadata_t* metadata = request.settings.getAndLock();
V3_2::implementation::convertToHidl(metadata, &result.result);
request.settings.unlock(metadata);
tryWriteFmqResult(result);
}
if (!result_buffers.empty() || !results_filled) {
result.frameNumber = request.frame_number;
result.partialResult = !results_filled ? 1 : 0;
result.inputBuffer.streamId = -1;
result.outputBuffers = result_buffers;
std::vector<V3_2::CaptureResult> results{result};
auto status = callback_->processCaptureResult(results);
release_fences.clear();
if (!status.isOk()) {
ALOGE("%s: processCaptureResult error: %s", __FUNCTION__,
status.description().c_str());
}
}
if (!pending_buffers.empty()) {
// some buffers still pending
request.buffer_ids = pending_buffers;
putRequestToQueue(request);
}
}
}
} // namespace android::hardware::camera::device::V3_4::implementation

View File

@@ -0,0 +1,142 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hardware/graphics/mapper/2.0/IMapper.h>
#include <fmq/MessageQueue.h>
#include <queue>
#include <thread>
#include "stream_buffer_cache.h"
#include "vsock_camera_metadata.h"
#include "vsock_frame_provider.h"
namespace android::hardware::camera::device::V3_4::implementation {
using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::kSynchronizedReadWrite;
using ::android::hardware::MessageQueue;
using ::android::hardware::Return;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::device::V3_2::BufferCache;
using ::android::hardware::camera::device::V3_2::CaptureRequest;
using ::android::hardware::camera::device::V3_2::ErrorCode;
using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback;
using ::android::hardware::camera::device::V3_2::RequestTemplate;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::camera::device::V3_4::ICameraDeviceSession;
using ::android::hardware::camera::device::V3_4::StreamConfiguration;
using ::android::hardware::graphics::mapper::V2_0::YCbCrLayout;
class VsockCameraDeviceSession : public ICameraDeviceSession {
public:
VsockCameraDeviceSession(
VsockCameraMetadata camera_characteristics,
std::shared_ptr<cuttlefish::VsockFrameProvider> frame_provider,
const sp<ICameraDeviceCallback>& callback);
~VsockCameraDeviceSession();
Return<void> constructDefaultRequestSettings(
RequestTemplate,
ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb);
Return<void> configureStreams(const V3_2::StreamConfiguration&,
ICameraDeviceSession::configureStreams_cb);
Return<void> configureStreams_3_3(
const V3_2::StreamConfiguration&,
ICameraDeviceSession::configureStreams_3_3_cb);
Return<void> configureStreams_3_4(
const V3_4::StreamConfiguration& requestedConfiguration,
ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb);
Return<void> getCaptureRequestMetadataQueue(
ICameraDeviceSession::getCaptureRequestMetadataQueue_cb);
Return<void> getCaptureResultMetadataQueue(
ICameraDeviceSession::getCaptureResultMetadataQueue_cb);
Return<void> processCaptureRequest(
const hidl_vec<CaptureRequest>&, const hidl_vec<BufferCache>&,
ICameraDeviceSession::processCaptureRequest_cb);
Return<Status> flush();
Return<void> close();
Return<void> processCaptureRequest_3_4(
const hidl_vec<V3_4::CaptureRequest>& requests,
const hidl_vec<V3_2::BufferCache>& cachesToRemove,
ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb);
private:
struct ReadVsockRequest {
std::vector<uint64_t> buffer_ids;
uint32_t frame_number;
nsecs_t timestamp;
common::V1_0::helper::CameraMetadata settings;
uint32_t buffer_count;
};
struct VsockRequestComparator {
bool operator()(const ReadVsockRequest& lhs, const ReadVsockRequest& rhs) {
return lhs.frame_number > rhs.frame_number;
}
};
void updateBufferCaches(const hidl_vec<BufferCache>& to_remove);
Status configureStreams(const V3_2::StreamConfiguration& config,
V3_3::HalStreamConfiguration* out);
unsigned int getBlobSize(
const V3_4::StreamConfiguration& requestedConfiguration);
Status isStreamConfigurationSupported(
const V3_2::StreamConfiguration& config);
void updateStreamInfo(const V3_2::StreamConfiguration& config);
Status processOneCaptureRequest(const CaptureRequest& request);
bool getRequestSettingsFmq(uint64_t size,
V3_2::CameraMetadata& hidl_settings);
void processRequestLoop(unsigned int timeout);
bool getRequestFromQueue(ReadVsockRequest& request, unsigned int timeout_ms);
void putRequestToQueue(const ReadVsockRequest& request);
void fillCaptureResult(common::V1_0::helper::CameraMetadata& md,
nsecs_t timestamp);
void notifyShutter(uint32_t frame_number, nsecs_t timestamp);
void notifyError(uint32_t frame_number, int32_t stream_id, ErrorCode code);
void tryWriteFmqResult(V3_2::CaptureResult& result);
VsockCameraMetadata camera_characteristics_;
std::shared_ptr<cuttlefish::VsockFrameProvider> frame_provider_;
const sp<ICameraDeviceCallback> callback_;
std::unique_ptr<MessageQueue<uint8_t, kSynchronizedReadWrite>> request_queue_;
std::shared_ptr<MessageQueue<uint8_t, kSynchronizedReadWrite>> result_queue_;
std::mutex settings_mutex_;
common::V1_0::helper::CameraMetadata latest_request_settings_;
StreamBufferCache buffer_cache_;
std::map<int32_t, Stream> stream_cache_;
std::mutex request_mutex_;
std::condition_variable request_available_;
std::condition_variable queue_empty_;
std::priority_queue<ReadVsockRequest, std::vector<ReadVsockRequest>,
VsockRequestComparator>
pending_requests_;
std::thread request_processor_;
std::atomic<bool> process_requests_;
std::atomic<bool> flushing_requests_;
unsigned int max_blob_size_;
};
} // namespace android::hardware::camera::device::V3_4::implementation
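An illustrative aside, not part of the change: because VsockRequestComparator compares with operator>, the pending_requests_ priority_queue acts as a min-heap on frame_number, so the oldest outstanding capture request is always dequeued first. A minimal standalone sketch of the same ordering (simplified struct, made-up names):

#include <cstdint>
#include <queue>
#include <vector>
struct FakeRequest {
  uint32_t frame_number;
};
struct ByFrameNumber {
  // Same comparison as VsockRequestComparator above.
  bool operator()(const FakeRequest& lhs, const FakeRequest& rhs) const {
    return lhs.frame_number > rhs.frame_number;
  }
};
uint32_t OldestPendingFrame() {
  std::priority_queue<FakeRequest, std::vector<FakeRequest>, ByFrameNumber> q;
  q.push({7});
  q.push({3});
  q.push({5});
  return q.top().frame_number;  // 3 - the lowest pending frame number wins
}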


@ -0,0 +1,374 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "vsock_camera_metadata.h"
#include <hardware/camera3.h>
#include <utils/misc.h>
#include <vector>
namespace android::hardware::camera::device::V3_4::implementation {
namespace {
// Mostly copied from ExternalCameraDevice
const uint8_t kHardwarelevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
const uint8_t kAberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
const uint8_t kAvailableAberrationModes[] = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
const int32_t kExposureCompensation = 0;
const uint8_t kAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
const int32_t kControlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
const uint8_t kVideoStabilizationMode =
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
const uint8_t kAwbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO;
const uint8_t kAePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
const uint8_t kAeAvailableMode = ANDROID_CONTROL_AE_MODE_ON;
const uint8_t kAvailableEffect = ANDROID_CONTROL_EFFECT_MODE_OFF;
const uint8_t kControlMode = ANDROID_CONTROL_MODE_AUTO;
const uint8_t kControlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF,
ANDROID_CONTROL_MODE_AUTO};
const uint8_t kEdgeMode = ANDROID_EDGE_MODE_OFF;
const uint8_t kFlashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
const uint8_t kFlashMode = ANDROID_FLASH_MODE_OFF;
const uint8_t kHotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF;
const uint8_t kJpegQuality = 90;
const int32_t kJpegOrientation = 0;
const int32_t kThumbnailSize[] = {240, 180};
const int32_t kJpegAvailableThumbnailSizes[] = {0, 0, 240, 180};
const uint8_t kFocusDistanceCalibration =
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
const uint8_t kOpticalStabilizationMode =
ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
const uint8_t kFacing = ANDROID_LENS_FACING_EXTERNAL;
const float kLensMinFocusDistance = 0.0f;
const uint8_t kNoiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
const int32_t kPartialResultCount = 1;
const uint8_t kRequestPipelineMaxDepth = 4;
const int32_t kRequestMaxNumInputStreams = 0;
const float kScalerAvailableMaxDigitalZoom[] = {1};
const uint8_t kCroppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
const int32_t kTestPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
const int32_t kTestPatternModes[] = {ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
const uint8_t kTimestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
const int32_t kOrientation = 0;
const uint8_t kAvailableShadingMode = ANDROID_SHADING_MODE_OFF;
const uint8_t kFaceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
const int32_t kMaxFaceCount = 0;
const uint8_t kAvailableHotpixelMode =
ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
const uint8_t kLensShadingMapMode =
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
const int32_t kMaxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
const int32_t kControlAeCompensationRange[] = {0, 0};
const camera_metadata_rational_t kControlAeCompensationStep[] = {{0, 1}};
const uint8_t kAfTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
const uint8_t kAfMode = ANDROID_CONTROL_AF_MODE_OFF;
const uint8_t kAfAvailableModes[] = {ANDROID_CONTROL_AF_MODE_OFF};
const uint8_t kAvailableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
const uint8_t kAeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
const uint8_t kAwbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
const int32_t kHalFormats[] = {HAL_PIXEL_FORMAT_BLOB,
HAL_PIXEL_FORMAT_YCbCr_420_888,
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
const int32_t kRequestMaxNumOutputStreams[] = {
/*RAW*/ 0,
/*Processed*/ 2,
/*Stall*/ 1};
const uint8_t kAvailableCapabilities[] = {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
const int32_t kAvailableRequestKeys[] = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
ANDROID_CONTROL_AE_ANTIBANDING_MODE,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
ANDROID_CONTROL_AE_LOCK,
ANDROID_CONTROL_AE_MODE,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
ANDROID_CONTROL_AF_MODE,
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AWB_LOCK,
ANDROID_CONTROL_AWB_MODE,
ANDROID_CONTROL_CAPTURE_INTENT,
ANDROID_CONTROL_EFFECT_MODE,
ANDROID_CONTROL_MODE,
ANDROID_CONTROL_SCENE_MODE,
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
ANDROID_FLASH_MODE,
ANDROID_JPEG_ORIENTATION,
ANDROID_JPEG_QUALITY,
ANDROID_JPEG_THUMBNAIL_QUALITY,
ANDROID_JPEG_THUMBNAIL_SIZE,
ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
ANDROID_NOISE_REDUCTION_MODE,
ANDROID_SCALER_CROP_REGION,
ANDROID_SENSOR_TEST_PATTERN_MODE,
ANDROID_STATISTICS_FACE_DETECT_MODE,
ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE};
const int32_t kAvailableResultKeys[] = {
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
ANDROID_CONTROL_AE_ANTIBANDING_MODE,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
ANDROID_CONTROL_AE_LOCK,
ANDROID_CONTROL_AE_MODE,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AE_STATE,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
ANDROID_CONTROL_AF_MODE,
ANDROID_CONTROL_AF_STATE,
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AWB_LOCK,
ANDROID_CONTROL_AWB_MODE,
ANDROID_CONTROL_AWB_STATE,
ANDROID_CONTROL_CAPTURE_INTENT,
ANDROID_CONTROL_EFFECT_MODE,
ANDROID_CONTROL_MODE,
ANDROID_CONTROL_SCENE_MODE,
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
ANDROID_FLASH_MODE,
ANDROID_FLASH_STATE,
ANDROID_JPEG_ORIENTATION,
ANDROID_JPEG_QUALITY,
ANDROID_JPEG_THUMBNAIL_QUALITY,
ANDROID_JPEG_THUMBNAIL_SIZE,
ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
ANDROID_NOISE_REDUCTION_MODE,
ANDROID_REQUEST_PIPELINE_DEPTH,
ANDROID_SCALER_CROP_REGION,
ANDROID_SENSOR_TIMESTAMP,
ANDROID_STATISTICS_FACE_DETECT_MODE,
ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
ANDROID_STATISTICS_SCENE_FLICKER};
const int32_t kAvailableCharacteristicsKeys[] = {
ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
ANDROID_CONTROL_AE_AVAILABLE_MODES,
ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
ANDROID_CONTROL_AE_COMPENSATION_RANGE,
ANDROID_CONTROL_AE_COMPENSATION_STEP,
ANDROID_CONTROL_AE_LOCK_AVAILABLE,
ANDROID_CONTROL_AF_AVAILABLE_MODES,
ANDROID_CONTROL_AVAILABLE_EFFECTS,
ANDROID_CONTROL_AVAILABLE_MODES,
ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
ANDROID_CONTROL_AWB_AVAILABLE_MODES,
ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
ANDROID_CONTROL_MAX_REGIONS,
ANDROID_FLASH_INFO_AVAILABLE,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
ANDROID_LENS_FACING,
ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
ANDROID_SCALER_CROPPING_TYPE,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
ANDROID_SENSOR_ORIENTATION,
ANDROID_SHADING_AVAILABLE_MODES,
ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
ANDROID_SYNC_MAX_LATENCY};
const std::map<RequestTemplate, uint8_t> kTemplateToIntent = {
{RequestTemplate::PREVIEW, ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW},
{RequestTemplate::STILL_CAPTURE,
ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE},
{RequestTemplate::VIDEO_RECORD,
ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD},
{RequestTemplate::VIDEO_SNAPSHOT,
ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT},
};
} // namespace
// Constructor sets the default characteristics for vsock camera
VsockCameraMetadata::VsockCameraMetadata(int32_t width, int32_t height,
int32_t fps)
: width_(width), height_(height), fps_(fps) {
update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, kControlAeCompensationRange,
NELEM(kControlAeCompensationRange));
update(ANDROID_CONTROL_AE_COMPENSATION_STEP, kControlAeCompensationStep,
NELEM(kControlAeCompensationStep));
update(ANDROID_CONTROL_AF_AVAILABLE_MODES, kAfAvailableModes,
NELEM(kAfAvailableModes));
update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &kAvailableSceneMode, 1);
update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &kAeLockAvailable, 1);
update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &kAwbLockAvailable, 1);
update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
kScalerAvailableMaxDigitalZoom, NELEM(kScalerAvailableMaxDigitalZoom));
update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, kAvailableCapabilities,
NELEM(kAvailableCapabilities));
update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &kHardwarelevel, 1);
update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
kAvailableAberrationModes, NELEM(kAvailableAberrationModes));
update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &kAntibandingMode, 1);
update(ANDROID_CONTROL_MAX_REGIONS, kControlMaxRegions,
NELEM(kControlMaxRegions));
update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
&kVideoStabilizationMode, 1);
update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &kAwbAvailableMode, 1);
update(ANDROID_CONTROL_AE_AVAILABLE_MODES, &kAeAvailableMode, 1);
update(ANDROID_CONTROL_AVAILABLE_EFFECTS, &kAvailableEffect, 1);
update(ANDROID_CONTROL_AVAILABLE_MODES, kControlAvailableModes,
NELEM(kControlAvailableModes));
update(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &kEdgeMode, 1);
update(ANDROID_FLASH_INFO_AVAILABLE, &kFlashInfo, 1);
update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &kHotPixelMode, 1);
update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, kJpegAvailableThumbnailSizes,
NELEM(kJpegAvailableThumbnailSizes));
update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
&kFocusDistanceCalibration, 1);
update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &kLensMinFocusDistance, 1);
update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
&kOpticalStabilizationMode, 1);
update(ANDROID_LENS_FACING, &kFacing, 1);
update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
&kNoiseReductionMode, 1);
update(ANDROID_NOISE_REDUCTION_MODE, &kNoiseReductionMode, 1);
update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &kPartialResultCount, 1);
update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &kRequestPipelineMaxDepth, 1);
update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, kRequestMaxNumOutputStreams,
NELEM(kRequestMaxNumOutputStreams));
update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &kRequestMaxNumInputStreams, 1);
update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
kScalerAvailableMaxDigitalZoom, NELEM(kScalerAvailableMaxDigitalZoom));
update(ANDROID_SCALER_CROPPING_TYPE, &kCroppingType, 1);
update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, kTestPatternModes,
NELEM(kTestPatternModes));
update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &kTimestampSource, 1);
update(ANDROID_SENSOR_ORIENTATION, &kOrientation, 1);
update(ANDROID_SHADING_AVAILABLE_MODES, &kAvailableShadingMode, 1);
update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &kFaceDetectMode,
1);
update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &kMaxFaceCount, 1);
update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
&kAvailableHotpixelMode, 1);
update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
&kLensShadingMapMode, 1);
update(ANDROID_SYNC_MAX_LATENCY, &kMaxLatency, 1);
update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, kAvailableRequestKeys,
NELEM(kAvailableRequestKeys));
update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, kAvailableResultKeys,
NELEM(kAvailableResultKeys));
update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
kAvailableCharacteristicsKeys, NELEM(kAvailableCharacteristicsKeys));
// Assume max 2 bytes/pixel plus blob header, because the client might provide PNGs
const int32_t jpeg_max_size = width * height * 2 + sizeof(camera3_jpeg_blob);
update(ANDROID_JPEG_MAX_SIZE, &jpeg_max_size, 1);
std::vector<int64_t> min_frame_durations;
std::vector<int32_t> stream_configurations;
std::vector<int64_t> stall_durations;
int64_t frame_duration = 1000000000L / fps;
for (const auto& format : kHalFormats) {
stream_configurations.push_back(format);
min_frame_durations.push_back(format);
stall_durations.push_back(format);
stream_configurations.push_back(width);
min_frame_durations.push_back(width);
stall_durations.push_back(width);
stream_configurations.push_back(height);
min_frame_durations.push_back(height);
stall_durations.push_back(height);
stream_configurations.push_back(
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
min_frame_durations.push_back(frame_duration);
stall_durations.push_back((format == HAL_PIXEL_FORMAT_BLOB) ? 2000000000L
: 0);
}
update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
stream_configurations.data(), stream_configurations.size());
update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
min_frame_durations.data(), min_frame_durations.size());
update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stall_durations.data(),
stall_durations.size());
int32_t active_array_size[] = {0, 0, width, height};
update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
active_array_size, NELEM(active_array_size));
update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, active_array_size,
NELEM(active_array_size));
int32_t pixel_array_size[] = {width, height};
update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixel_array_size,
NELEM(pixel_array_size));
int32_t max_frame_rate = fps;
int32_t min_frame_rate = max_frame_rate / 2;
int32_t frame_rates[] = {min_frame_rate, max_frame_rate};
update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, frame_rates,
NELEM(frame_rates));
int64_t max_frame_duration = 1000000000L / min_frame_rate;
update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &max_frame_duration, 1);
}
VsockCameraRequestMetadata::VsockCameraRequestMetadata(int32_t fps,
RequestTemplate type) {
update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &kAberrationMode, 1);
update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &kExposureCompensation, 1);
update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &kVideoStabilizationMode, 1);
update(ANDROID_CONTROL_AWB_MODE, &kAwbAvailableMode, 1);
update(ANDROID_CONTROL_AE_MODE, &kAeAvailableMode, 1);
update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &kAePrecaptureTrigger, 1);
update(ANDROID_CONTROL_AF_MODE, &kAfMode, 1);
update(ANDROID_CONTROL_AF_TRIGGER, &kAfTrigger, 1);
update(ANDROID_CONTROL_SCENE_MODE, &kAvailableSceneMode, 1);
update(ANDROID_CONTROL_EFFECT_MODE, &kAvailableEffect, 1);
update(ANDROID_FLASH_MODE, &kFlashMode, 1);
update(ANDROID_JPEG_THUMBNAIL_SIZE, kThumbnailSize, NELEM(kThumbnailSize));
update(ANDROID_JPEG_QUALITY, &kJpegQuality, 1);
update(ANDROID_JPEG_THUMBNAIL_QUALITY, &kJpegQuality, 1);
update(ANDROID_JPEG_ORIENTATION, &kJpegOrientation, 1);
update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &kOpticalStabilizationMode,
1);
update(ANDROID_NOISE_REDUCTION_MODE, &kNoiseReductionMode, 1);
update(ANDROID_SENSOR_TEST_PATTERN_MODE, &kTestPatternMode, 1);
update(ANDROID_STATISTICS_FACE_DETECT_MODE, &kFaceDetectMode, 1);
update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &kAvailableHotpixelMode, 1);
int32_t max_frame_rate = fps;
int32_t min_frame_rate = max_frame_rate / 2;
int32_t frame_rates[] = {min_frame_rate, max_frame_rate};
update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, frame_rates, NELEM(frame_rates));
update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &kAntibandingMode, 1);
update(ANDROID_CONTROL_MODE, &kControlMode, 1);
auto it = kTemplateToIntent.find(type);
if (it != kTemplateToIntent.end()) {
auto intent = it->second;
update(ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);
is_valid_ = true;
} else {
is_valid_ = false;
}
}
} // namespace android::hardware::camera::device::V3_4::implementation
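For orientation (this note is not in the source): every entry appended in the stream configuration loop above is a four-element tuple. ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS holds (format, width, height, direction), while the min-frame-duration and stall-duration tables hold (format, width, height, duration in ns). Assuming the default 1280x720 at 30 fps coming from getUserMedia(), the constructor would produce roughly:

// stream_configurations: {BLOB, 1280, 720, OUTPUT,
//                         YCbCr_420_888, 1280, 720, OUTPUT,
//                         IMPLEMENTATION_DEFINED, 1280, 720, OUTPUT}
// min_frame_durations:   {BLOB, 1280, 720, 33333333, ...}   // 1e9 / 30 fps
// stall_durations:       {BLOB, 1280, 720, 2000000000, ...} // only BLOB streams stall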


@ -0,0 +1,52 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <CameraMetadata.h>
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
namespace android::hardware::camera::device::V3_4::implementation {
// Small wrappers for mostly hard-coded camera metadata
// Some parameters are calculated from remote camera frame size and fps
using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
class VsockCameraMetadata : public CameraMetadata {
public:
VsockCameraMetadata(int32_t width, int32_t height, int32_t fps);
int32_t getPreferredWidth() const { return width_; }
int32_t getPreferredHeight() const { return height_; }
int32_t getPreferredFps() const { return fps_; }
private:
int32_t width_;
int32_t height_;
int32_t fps_;
};
using ::android::hardware::camera::device::V3_2::RequestTemplate;
class VsockCameraRequestMetadata : public CameraMetadata {
public:
VsockCameraRequestMetadata(int32_t fps, RequestTemplate type);
// Tells whether the metadata has been successfully constructed
// from the parameters
bool isValid() const { return is_valid_; }
private:
bool is_valid_;
};
} // namespace android::hardware::camera::device::V3_4::implementation
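A hedged usage sketch, not taken from the commit (resolution and fps values are placeholders, and it assumes the helper CameraMetadata's getAndLock()/unlock() API):

// Static characteristics derived from the remote camera settings.
VsockCameraMetadata characteristics(/*width=*/1280, /*height=*/720, /*fps=*/30);
const camera_metadata_t* raw = characteristics.getAndLock();
// ...return raw from getCameraCharacteristics(), then:
characteristics.unlock(raw);
// Per-template default request settings.
VsockCameraRequestMetadata preview(/*fps=*/30, RequestTemplate::PREVIEW);
if (preview.isValid()) {
  // use it as the constructDefaultRequestSettings() result
}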


@ -0,0 +1,159 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "VsockCameraProvider"
#include "vsock_camera_provider_2_7.h"
#include <cutils/properties.h>
#include <log/log.h>
#include "vsock_camera_server.h"
namespace android::hardware::camera::provider::V2_7::implementation {
namespace {
VsockCameraServer gCameraServer;
constexpr auto kDeviceName = "device@3.4/external/0";
} // namespace
using android::hardware::camera::provider::V2_7::ICameraProvider;
extern "C" ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name) {
return (strcmp(name, "external/0") == 0)
? new VsockCameraProvider(&gCameraServer)
: nullptr;
}
VsockCameraProvider::VsockCameraProvider(VsockCameraServer* server) {
server_ = server;
if (!server->isRunning()) {
constexpr static const auto camera_port_property =
"ro.boot.vsock_camera_port";
constexpr static const auto camera_cid_property =
"ro.boot.vsock_camera_cid";
auto port = property_get_int32(camera_port_property, -1);
auto cid = property_get_int32(camera_cid_property, -1);
if (port > 0) {
server->start(port, cid);
}
}
}
VsockCameraProvider::~VsockCameraProvider() {
server_->setConnectedCallback(nullptr);
}
Return<Status> VsockCameraProvider::setCallback(
const sp<ICameraProviderCallback>& callback) {
{
std::lock_guard<std::mutex> lock(mutex_);
callbacks_ = callback;
}
server_->setConnectedCallback(
[this](std::shared_ptr<cuttlefish::VsockConnection> connection,
VsockCameraDevice::Settings settings) {
connection_ = connection;
settings_ = settings;
deviceAdded(kDeviceName);
connection_->SetDisconnectCallback(
[this] { deviceRemoved(kDeviceName); });
});
return Status::OK;
}
Return<void> VsockCameraProvider::getVendorTags(
ICameraProvider::getVendorTags_cb _hidl_cb) {
// No vendor tag support
hidl_vec<VendorTagSection> empty;
_hidl_cb(Status::OK, empty);
return Void();
}
Return<void> VsockCameraProvider::getCameraIdList(
ICameraProvider::getCameraIdList_cb _hidl_cb) {
// The external camera HAL always reports zero cameras here; cameras are
// reported later via cameraDeviceStatusChange callbacks
hidl_vec<hidl_string> empty;
_hidl_cb(Status::OK, empty);
return Void();
}
Return<void> VsockCameraProvider::isSetTorchModeSupported(
ICameraProvider::isSetTorchModeSupported_cb _hidl_cb) {
// setTorchMode API is supported, though right now no external camera device
// has a flash unit.
_hidl_cb(Status::OK, true);
return Void();
}
Return<void> VsockCameraProvider::getCameraDeviceInterface_V1_x(
const hidl_string&,
ICameraProvider::getCameraDeviceInterface_V1_x_cb _hidl_cb) {
// External Camera HAL does not support HAL1
_hidl_cb(Status::OPERATION_NOT_SUPPORTED, nullptr);
return Void();
}
Return<void> VsockCameraProvider::getCameraDeviceInterface_V3_x(
const hidl_string& name_hidl_str,
ICameraProvider::getCameraDeviceInterface_V3_x_cb _hidl_cb) {
std::string name(name_hidl_str.c_str());
if (name != kDeviceName) {
_hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
return Void();
}
_hidl_cb(Status::OK, new VsockCameraDevice(name, settings_, connection_));
return Void();
}
Return<void> VsockCameraProvider::notifyDeviceStateChange(
hardware::hidl_bitfield<DeviceState> /*newState*/) {
return Void();
}
Return<void> VsockCameraProvider::getConcurrentStreamingCameraIds(
getConcurrentStreamingCameraIds_cb _hidl_cb) {
hidl_vec<hidl_vec<hidl_string>> hidl_camera_id_combinations;
_hidl_cb(Status::OK, hidl_camera_id_combinations);
return Void();
}
Return<void> VsockCameraProvider::isConcurrentStreamCombinationSupported(
const hidl_vec<::android::hardware::camera::provider::V2_6::CameraIdAndStreamCombination>&,
isConcurrentStreamCombinationSupported_cb _hidl_cb) {
_hidl_cb(Status::OK, false);
return Void();
}
Return<void> VsockCameraProvider::isConcurrentStreamCombinationSupported_2_7(
const hidl_vec<::android::hardware::camera::provider::V2_7::CameraIdAndStreamCombination>&,
isConcurrentStreamCombinationSupported_2_7_cb _hidl_cb) {
_hidl_cb(Status::OK, false);
return Void();
}
void VsockCameraProvider::deviceAdded(const char* name) {
std::lock_guard<std::mutex> lock(mutex_);
if (callbacks_ != nullptr) {
callbacks_->cameraDeviceStatusChange(name, CameraDeviceStatus::PRESENT);
}
}
void VsockCameraProvider::deviceRemoved(const char* name) {
std::lock_guard<std::mutex> lock(mutex_);
if (callbacks_ != nullptr) {
callbacks_->cameraDeviceStatusChange(name, CameraDeviceStatus::NOT_PRESENT);
}
}
} // namespace android::hardware::camera::provider::V2_7::implementation
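A rough sketch, assuming the stock V2_4 ICameraProviderCallback HIDL interface, of a callback that would observe the presence notifications emitted by deviceAdded()/deviceRemoved() above; this is also why the camera app only appears once the vsock client has connected:

struct PresenceLogger : public V2_4::ICameraProviderCallback {
  Return<void> cameraDeviceStatusChange(const hidl_string& name,
                                        CameraDeviceStatus status) override {
    ALOGI("Camera %s is now %s", name.c_str(),
          status == CameraDeviceStatus::PRESENT ? "PRESENT" : "NOT_PRESENT");
    return Void();
  }
  Return<void> torchModeStatusChange(const hidl_string&,
                                     common::V1_0::TorchModeStatus) override {
    return Void();
  }
};
// provider->setCallback(new PresenceLogger());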


@ -0,0 +1,81 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <mutex>
#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
#include <hidl/MQDescriptor.h>
#include <hidl/Status.h>
#include <json/json.h>
#include "vsock_camera_device_3_4.h"
#include "vsock_camera_server.h"
#include "vsock_connection.h"
namespace android::hardware::camera::provider::V2_7::implementation {
using ::android::sp;
using ::android::hardware::hidl_string;
using ::android::hardware::Return;
using ::android::hardware::camera::common::V1_0::CameraDeviceStatus;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::common::V1_0::VendorTagSection;
using ::android::hardware::camera::device::V3_4::implementation::
VsockCameraDevice;
using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback;
using ::android::hardware::camera::provider::V2_5::DeviceState;
using ::android::hardware::camera::provider::V2_7::ICameraProvider;
class VsockCameraProvider : public ICameraProvider {
public:
VsockCameraProvider(VsockCameraServer* server);
~VsockCameraProvider();
Return<Status> setCallback(
const sp<ICameraProviderCallback>& callback) override;
Return<void> getVendorTags(getVendorTags_cb _hidl_cb) override;
Return<void> getCameraIdList(getCameraIdList_cb _hidl_cb) override;
Return<void> isSetTorchModeSupported(
isSetTorchModeSupported_cb _hidl_cb) override;
Return<void> getCameraDeviceInterface_V1_x(
const hidl_string& cameraDeviceName,
getCameraDeviceInterface_V1_x_cb _hidl_cb) override;
Return<void> getCameraDeviceInterface_V3_x(
const hidl_string& cameraDeviceName,
getCameraDeviceInterface_V3_x_cb _hidl_cb) override;
Return<void> notifyDeviceStateChange(
hardware::hidl_bitfield<DeviceState> newState) override;
Return<void> getConcurrentStreamingCameraIds(
getConcurrentStreamingCameraIds_cb _hidl_cb) override;
Return<void> isConcurrentStreamCombinationSupported(
const hidl_vec<::android::hardware::camera::provider::V2_6::CameraIdAndStreamCombination>& configs,
isConcurrentStreamCombinationSupported_cb _hidl_cb) override;
Return<void> isConcurrentStreamCombinationSupported_2_7(
const hidl_vec<::android::hardware::camera::provider::V2_7::CameraIdAndStreamCombination>& configs,
isConcurrentStreamCombinationSupported_2_7_cb _hidl_cb) override;
private:
void deviceRemoved(const char* name);
void deviceAdded(const char* name);
std::mutex mutex_;
sp<ICameraProviderCallback> callbacks_;
std::shared_ptr<cuttlefish::VsockConnection> connection_;
VsockCameraDevice::Settings settings_;
VsockCameraServer* server_;
};
} // namespace android::hardware::camera::provider::V2_7::implementation


@ -0,0 +1,102 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "VsockCameraServer"
#include "vsock_camera_server.h"
#include <log/log.h>
namespace android::hardware::camera::provider::V2_7::implementation {
using ::android::hardware::camera::device::V3_4::implementation::
VsockCameraDevice;
namespace {
bool containsValidSettings(const VsockCameraDevice::Settings& settings) {
return settings.width > 0 && settings.height > 0 && settings.frame_rate > 0.0;
}
bool readSettingsFromJson(VsockCameraDevice::Settings& settings,
const Json::Value& json) {
VsockCameraDevice::Settings new_settings;
new_settings.width = json["width"].asInt();
new_settings.height = json["height"].asInt();
new_settings.frame_rate = json["frame_rate"].asDouble();
if (containsValidSettings(new_settings)) {
settings = new_settings;
return true;
} else {
return false;
}
}
} // namespace
VsockCameraServer::VsockCameraServer() {
ALOGI("%s: Create server", __FUNCTION__);
connection_ = std::make_shared<cuttlefish::VsockServerConnection>();
}
VsockCameraServer::~VsockCameraServer() {
ALOGI("%s: Destroy server", __FUNCTION__);
stop();
}
void VsockCameraServer::start(unsigned int port, unsigned int cid) {
stop();
is_running_ = true;
server_thread_ = std::thread([this, port, cid] { serverLoop(port, cid); });
}
void VsockCameraServer::stop() {
connection_->ServerShutdown();
is_running_ = false;
if (server_thread_.joinable()) {
server_thread_.join();
}
}
void VsockCameraServer::setConnectedCallback(callback_t callback) {
connected_callback_ = callback;
std::lock_guard<std::mutex> lock(settings_mutex_);
if (callback && connection_->IsConnected() &&
containsValidSettings(settings_)) {
callback(connection_, settings_);
}
}
void VsockCameraServer::serverLoop(unsigned int port, unsigned int cid) {
while (is_running_.load()) {
ALOGI("%s: Accepting connections...", __FUNCTION__);
if (connection_->Connect(port, cid)) {
auto json_settings = connection_->ReadJsonMessage();
VsockCameraDevice::Settings settings;
if (readSettingsFromJson(settings, json_settings)) {
std::lock_guard<std::mutex> lock(settings_mutex_);
settings_ = settings;
if (connected_callback_) {
connected_callback_(connection_, settings);
}
ALOGI("%s: Client connected", __FUNCTION__);
} else {
ALOGE("%s: Could not read settings", __FUNCTION__);
}
} else {
ALOGE("%s: Accepting connections failed", __FUNCTION__);
}
}
ALOGI("%s: Exiting", __FUNCTION__);
}
} // namespace android::hardware::camera::provider::V2_7::implementation
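For reference, reconstructed from readSettingsFromJson() above rather than quoted from the client: the first message the server expects after a connection is a JSON object carrying at least these keys (values here are placeholders):

{"width": 1280, "height": 720, "frame_rate": 30.0}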


@ -0,0 +1,55 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <atomic>
#include <functional>
#include <memory>
#include <mutex>
#include <thread>
#include "vsock_camera_device_3_4.h"
#include "vsock_connection.h"
namespace android::hardware::camera::provider::V2_7::implementation {
using ::android::hardware::camera::device::V3_4::implementation::
VsockCameraDevice;
class VsockCameraServer {
public:
VsockCameraServer();
~VsockCameraServer();
VsockCameraServer(const VsockCameraServer&) = delete;
VsockCameraServer& operator=(const VsockCameraServer&) = delete;
bool isRunning() const { return is_running_.load(); }
void start(unsigned int port, unsigned int cid);
void stop();
using callback_t =
std::function<void(std::shared_ptr<cuttlefish::VsockConnection>,
VsockCameraDevice::Settings)>;
void setConnectedCallback(callback_t callback);
private:
void serverLoop(unsigned int port, unsigned int cid);
std::thread server_thread_;
std::atomic<bool> is_running_;
std::shared_ptr<cuttlefish::VsockServerConnection> connection_;
std::mutex settings_mutex_;
VsockCameraDevice::Settings settings_;
callback_t connected_callback_;
};
} // namespace android::hardware::camera::provider::V2_7::implementation


@ -0,0 +1,172 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "vsock_frame_provider.h"
#include <hardware/camera3.h>
#include <libyuv.h>
#include <cstring>
#define LOG_TAG "VsockFrameProvider"
#include <log/log.h>
namespace cuttlefish {
VsockFrameProvider::~VsockFrameProvider() { stop(); }
void VsockFrameProvider::start(
std::shared_ptr<cuttlefish::VsockConnection> connection, uint32_t width,
uint32_t height) {
stop();
running_ = true;
connection_ = connection;
reader_thread_ =
std::thread([this, width, height] { VsockReadLoop(width, height); });
}
void VsockFrameProvider::stop() {
running_ = false;
jpeg_pending_ = false;
if (reader_thread_.joinable()) {
reader_thread_.join();
}
connection_ = nullptr;
}
bool VsockFrameProvider::waitYUVFrame(unsigned int max_wait_ms) {
auto timeout = std::chrono::milliseconds(max_wait_ms);
nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
std::unique_lock<std::mutex> lock(frame_mutex_);
return yuv_frame_updated_.wait_for(
lock, timeout, [this, now] { return timestamp_.load() > now; });
}
void VsockFrameProvider::requestJpeg() {
jpeg_pending_ = true;
Json::Value message;
message["event"] = "VIRTUAL_DEVICE_CAPTURE_IMAGE";
if (connection_) {
connection_->WriteMessage(message);
}
}
void VsockFrameProvider::cancelJpegRequest() { jpeg_pending_ = false; }
bool VsockFrameProvider::copyYUVFrame(uint32_t w, uint32_t h, YCbCrLayout dst) {
size_t y_size = w * h;
size_t cbcr_size = (w / 2) * (h / 2);
size_t total_size = y_size + 2 * cbcr_size;
std::lock_guard<std::mutex> lock(frame_mutex_);
if (frame_.size() < total_size) {
ALOGE("%s: %zu is too little for %ux%u frame", __FUNCTION__, frame_.size(),
w, h);
return false;
}
if (dst.y == nullptr) {
ALOGE("%s: Destination is nullptr!", __FUNCTION__);
return false;
}
YCbCrLayout src{.y = static_cast<void*>(frame_.data()),
.cb = static_cast<void*>(frame_.data() + y_size),
.cr = static_cast<void*>(frame_.data() + y_size + cbcr_size),
.yStride = w,
.cStride = w / 2,
.chromaStep = 1};
uint8_t* src_y = static_cast<uint8_t*>(src.y);
uint8_t* dst_y = static_cast<uint8_t*>(dst.y);
uint8_t* src_cb = static_cast<uint8_t*>(src.cb);
uint8_t* dst_cb = static_cast<uint8_t*>(dst.cb);
uint8_t* src_cr = static_cast<uint8_t*>(src.cr);
uint8_t* dst_cr = static_cast<uint8_t*>(dst.cr);
libyuv::CopyPlane(src_y, src.yStride, dst_y, dst.yStride, w, h);
if (dst.chromaStep == 1) {
// Planar
libyuv::CopyPlane(src_cb, src.cStride, dst_cb, dst.cStride, w / 2, h / 2);
libyuv::CopyPlane(src_cr, src.cStride, dst_cr, dst.cStride, w / 2, h / 2);
} else if (dst.chromaStep == 2 && dst_cr - dst_cb == 1) {
// Interleaved cb/cr planes starting with cb
libyuv::MergeUVPlane(src_cb, src.cStride, src_cr, src.cStride, dst_cb,
dst.cStride, w / 2, h / 2);
} else if (dst.chromaStep == 2 && dst_cb - dst_cr == 1) {
// Interleaved cb/cr planes starting with cr
libyuv::MergeUVPlane(src_cr, src.cStride, src_cb, src.cStride, dst_cr,
dst.cStride, w / 2, h / 2);
} else {
ALOGE("%s: Unsupported interleaved U/V layout", __FUNCTION__);
return false;
}
return true;
}
bool VsockFrameProvider::copyJpegData(uint32_t size, void* dst) {
std::lock_guard<std::mutex> lock(jpeg_mutex_);
auto jpeg_header_offset = size - sizeof(struct camera3_jpeg_blob);
if (cached_jpeg_.empty()) {
ALOGE("%s: No source data", __FUNCTION__);
return false;
} else if (dst == nullptr) {
ALOGE("%s: Destination is nullptr", __FUNCTION__);
return false;
} else if (jpeg_header_offset <= cached_jpeg_.size()) {
ALOGE("%s: %ubyte target buffer too small", __FUNCTION__, size);
return false;
}
std::memcpy(dst, cached_jpeg_.data(), cached_jpeg_.size());
struct camera3_jpeg_blob* blob = reinterpret_cast<struct camera3_jpeg_blob*>(
static_cast<char*>(dst) + jpeg_header_offset);
blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
blob->jpeg_size = cached_jpeg_.size();
cached_jpeg_.clear();
return true;
}
bool VsockFrameProvider::isBlob(const std::vector<char>& blob) {
bool is_png = blob.size() > 4 && (blob[0] & 0xff) == 0x89 &&
(blob[1] & 0xff) == 0x50 && (blob[2] & 0xff) == 0x4e &&
(blob[3] & 0xff) == 0x47;
bool is_jpeg =
blob.size() > 2 && (blob[0] & 0xff) == 0xff && (blob[1] & 0xff) == 0xd8;
return is_png || is_jpeg;
}
bool VsockFrameProvider::framesizeMatches(uint32_t width, uint32_t height,
const std::vector<char>& data) {
return data.size() == 3 * width * height / 2;
}
void VsockFrameProvider::VsockReadLoop(uint32_t width, uint32_t height) {
jpeg_pending_ = false;
while (running_.load() && connection_->ReadMessage(next_frame_)) {
if (framesizeMatches(width, height, next_frame_)) {
std::lock_guard<std::mutex> lock(frame_mutex_);
timestamp_ = systemTime();
frame_.swap(next_frame_);
yuv_frame_updated_.notify_one();
} else if (isBlob(next_frame_)) {
std::lock_guard<std::mutex> lock(jpeg_mutex_);
bool was_pending = jpeg_pending_.exchange(false);
if (was_pending) {
cached_jpeg_.swap(next_frame_);
}
} else {
ALOGE("%s: Unexpected data of %zu bytes", __FUNCTION__,
next_frame_.size());
}
}
if (!connection_->IsConnected()) {
ALOGE("%s: Connection closed - exiting", __FUNCTION__);
running_ = false;
}
}
} // namespace cuttlefish
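A hedged sketch (buffer names and sizes are hypothetical) of a destination layout that exercises the semi-planar branch of copyYUVFrame() above; with chromaStep == 2 and cr placed one byte after cb, the cb-first MergeUVPlane() path is taken:

const uint32_t w = 1280, h = 720;
std::vector<uint8_t> dst_y(w * h);
std::vector<uint8_t> dst_uv(w * h / 2);  // interleaved CbCr, NV12-style
YCbCrLayout dst{.y = dst_y.data(),
                .cb = dst_uv.data(),      // Cb first...
                .cr = dst_uv.data() + 1,  // ...Cr interleaved right after it
                .yStride = w,
                .cStride = w,             // stride of the interleaved chroma plane
                .chromaStep = 2};
frame_provider->copyYUVFrame(w, h, dst);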


@ -0,0 +1,68 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <android/hardware/graphics/mapper/2.0/IMapper.h>
#include <atomic>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>
#include "utils/Timers.h"
#include "vsock_connection.h"
namespace cuttlefish {
using ::android::hardware::graphics::mapper::V2_0::YCbCrLayout;
// VsockFrameProvider reads data from vsock
// Users can get the data by using copyYUVFrame/copyJpegData methods
class VsockFrameProvider {
public:
VsockFrameProvider() = default;
~VsockFrameProvider();
VsockFrameProvider(const VsockFrameProvider&) = delete;
VsockFrameProvider& operator=(const VsockFrameProvider&) = delete;
void start(std::shared_ptr<cuttlefish::VsockConnection> connection,
uint32_t expected_width, uint32_t expected_height);
void stop();
void requestJpeg();
void cancelJpegRequest();
bool jpegPending() const { return jpeg_pending_.load(); }
bool isRunning() const { return running_.load(); }
bool waitYUVFrame(unsigned int max_wait_ms);
bool copyYUVFrame(uint32_t width, uint32_t height, YCbCrLayout dst);
bool copyJpegData(uint32_t size, void* dst);
private:
bool isBlob(const std::vector<char>& blob);
bool framesizeMatches(uint32_t width, uint32_t height,
const std::vector<char>& data);
void VsockReadLoop(uint32_t expected_width, uint32_t expected_height);
std::thread reader_thread_;
std::mutex frame_mutex_;
std::mutex jpeg_mutex_;
std::atomic<nsecs_t> timestamp_;
std::atomic<bool> running_;
std::atomic<bool> jpeg_pending_;
std::vector<char> frame_;
std::vector<char> next_frame_;
std::vector<char> cached_jpeg_;
std::condition_variable yuv_frame_updated_;
std::shared_ptr<cuttlefish::VsockConnection> connection_;
};
} // namespace cuttlefish
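A condensed lifecycle sketch (variable names are hypothetical) of how the HAL session is expected to drive this interface:

frame_provider->start(connection, /*expected_width=*/1280, /*expected_height=*/720);
if (frame_provider->waitYUVFrame(/*max_wait_ms=*/5000)) {
  frame_provider->copyYUVFrame(1280, 720, mapped_yuv_layout);  // YUV_420_888 stream
}
frame_provider->requestJpeg();  // ask the WebRTC client for a still capture
// ...once the blob arrives: frame_provider->copyJpegData(max_blob_size, blob_buffer);
frame_provider->stop();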


@ -315,6 +315,8 @@ DEFINE_bool(protected_vm, false, "Boot in Protected VM mode");
DEFINE_bool(enable_audio, cuttlefish::HostArch() != cuttlefish::Arch::Arm64,
"Whether to play or capture audio");
DEFINE_uint32(camera_server_port, 0, "camera vsock port");
DECLARE_string(assembly_dir);
DECLARE_string(boot_image);
DECLARE_string(system_image_dir);
@ -786,6 +788,7 @@ CuttlefishConfig InitializeCuttlefishConfiguration(
instance.set_rootcanal_default_commands_file(
FLAGS_bluetooth_default_commands_file);
instance.set_camera_server_port(FLAGS_camera_server_port);
instance.set_device_title(FLAGS_device_title);
if (FLAGS_protected_vm) {


@ -22,6 +22,7 @@ cc_library_static {
srcs: [
"lib/audio_device.cpp",
"lib/audio_track_source_impl.cpp",
"lib/camera_streamer.cpp",
"lib/client_handler.cpp",
"lib/keyboard.cpp",
"lib/local_recorder.cpp",


@ -35,6 +35,7 @@
#include "common/libs/fs/shared_buf.h" #include "common/libs/fs/shared_buf.h"
#include "host/frontend/webrtc/adb_handler.h" #include "host/frontend/webrtc/adb_handler.h"
#include "host/frontend/webrtc/bluetooth_handler.h" #include "host/frontend/webrtc/bluetooth_handler.h"
#include "host/frontend/webrtc/lib/camera_controller.h"
#include "host/frontend/webrtc/lib/utils.h" #include "host/frontend/webrtc/lib/utils.h"
#include "host/libs/config/cuttlefish_config.h" #include "host/libs/config/cuttlefish_config.h"
@ -103,11 +104,13 @@ class ConnectionObserverForAndroid
cuttlefish::KernelLogEventsHandler *kernel_log_events_handler,
std::map<std::string, cuttlefish::SharedFD>
commands_to_custom_action_servers,
std::weak_ptr<DisplayHandler> display_handler) std::weak_ptr<DisplayHandler> display_handler,
CameraController *camera_controller)
: input_sockets_(input_sockets),
kernel_log_events_handler_(kernel_log_events_handler),
commands_to_custom_action_servers_(commands_to_custom_action_servers),
weak_display_handler_(display_handler) {} weak_display_handler_(display_handler),
camera_controller_(camera_controller) {}
virtual ~ConnectionObserverForAndroid() {
auto display_handler = weak_display_handler_.lock();
if (display_handler) {
@ -242,6 +245,9 @@ class ConnectionObserverForAndroid
void OnControlChannelOpen(std::function<bool(const Json::Value)>
control_message_sender) override {
LOG(VERBOSE) << "Control Channel open";
if (camera_controller_) {
camera_controller_->SetMessageSender(control_message_sender);
}
kernel_log_subscription_id_ =
kernel_log_events_handler_->AddSubscriber(control_message_sender);
}
@ -282,6 +288,10 @@ class ConnectionObserverForAndroid
// Sensor HAL.
}
return;
} else if (command.rfind("camera_", 0) == 0 && camera_controller_) {
// Handle commands starting with "camera_" by camera controller
camera_controller_->HandleMessage(evt);
return;
}
auto button_state = evt["button_state"].asString();
@ -328,6 +338,12 @@ class ConnectionObserverForAndroid
bluetooth_handler_->handleMessage(msg, size);
}
void OnCameraData(const std::vector<char> &data) override {
if (camera_controller_) {
camera_controller_->HandleMessage(data);
}
}
private:
cuttlefish::InputSockets& input_sockets_;
cuttlefish::KernelLogEventsHandler* kernel_log_events_handler_;
@ -338,6 +354,7 @@ class ConnectionObserverForAndroid
std::map<std::string, cuttlefish::SharedFD> commands_to_custom_action_servers_;
std::weak_ptr<DisplayHandler> weak_display_handler_;
std::set<int32_t> active_touch_slots_;
cuttlefish::CameraController *camera_controller_;
};
class ConnectionObserverDemuxer
@ -350,10 +367,12 @@ class ConnectionObserverDemuxer
std::map<std::string, cuttlefish::SharedFD>
commands_to_custom_action_servers,
std::weak_ptr<DisplayHandler> display_handler,
CameraController *camera_controller,
/* params for this class */
cuttlefish::confui::HostVirtualInput &confui_input)
: android_input_(input_sockets, kernel_log_events_handler,
commands_to_custom_action_servers, display_handler), commands_to_custom_action_servers, display_handler,
camera_controller),
confui_input_{confui_input} {}
virtual ~ConnectionObserverDemuxer() = default;
@ -431,6 +450,10 @@ class ConnectionObserverDemuxer
android_input_.OnBluetoothMessage(msg, size);
}
void OnCameraData(const std::vector<char> &data) override {
android_input_.OnCameraData(data);
}
private:
ConnectionObserverForAndroid android_input_;
cuttlefish::confui::HostVirtualInput &confui_input_;
@ -449,7 +472,8 @@ CfConnectionObserverFactory::CreateObserver() {
return std::shared_ptr<cuttlefish::webrtc_streaming::ConnectionObserver>(
new ConnectionObserverDemuxer(input_sockets_, kernel_log_events_handler_,
commands_to_custom_action_servers_,
weak_display_handler_, confui_input_)); weak_display_handler_, camera_controller_,
confui_input_));
}
void CfConnectionObserverFactory::AddCustomActionServer(
@ -465,4 +489,9 @@ void CfConnectionObserverFactory::SetDisplayHandler(
std::weak_ptr<DisplayHandler> display_handler) {
weak_display_handler_ = display_handler;
}
void CfConnectionObserverFactory::SetCameraHandler(
CameraController *controller) {
camera_controller_ = controller;
}
} // namespace cuttlefish


@ -22,6 +22,7 @@
#include "common/libs/fs/shared_fd.h" #include "common/libs/fs/shared_fd.h"
#include "host/frontend/webrtc/display_handler.h" #include "host/frontend/webrtc/display_handler.h"
#include "host/frontend/webrtc/kernel_log_events_handler.h" #include "host/frontend/webrtc/kernel_log_events_handler.h"
#include "host/frontend/webrtc/lib/camera_controller.h"
#include "host/frontend/webrtc/lib/connection_observer.h" #include "host/frontend/webrtc/lib/connection_observer.h"
#include "host/libs/confui/host_virtual_input.h" #include "host/libs/confui/host_virtual_input.h"
@ -59,6 +60,8 @@ class CfConnectionObserverFactory
void SetDisplayHandler(std::weak_ptr<DisplayHandler> display_handler);
void SetCameraHandler(CameraController* controller);
private:
InputSockets& input_sockets_;
KernelLogEventsHandler* kernel_log_events_handler_;
@ -66,6 +69,7 @@ class CfConnectionObserverFactory
commands_to_custom_action_servers_;
std::weak_ptr<DisplayHandler> weak_display_handler_;
cuttlefish::confui::HostVirtualInput& confui_input_;
cuttlefish::CameraController* camera_controller_ = nullptr;
};
} // namespace cuttlefish


@ -0,0 +1,45 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <functional>
#include <vector>
#include <json/json.h>
namespace cuttlefish {
class CameraController {
public:
virtual ~CameraController() = default;
// Handle data messages coming from the client
virtual void HandleMessage(const std::vector<char>& message) = 0;
// Handle control messages coming from the client
virtual void HandleMessage(const Json::Value& message) = 0;
// Send control messages to client
virtual void SendMessage(const Json::Value& msg) {
if (message_sender_) {
message_sender_(msg);
}
}
virtual void SetMessageSender(
std::function<bool(const Json::Value& msg)> sender) {
message_sender_ = sender;
}
protected:
std::function<bool(const Json::Value& msg)> message_sender_;
};
} // namespace cuttlefish
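A minimal sketch of an implementation of this interface; the class name is made up, and CameraStreamer added later in this change is the real implementation:

class EchoCameraController : public CameraController {
 public:
  // Binary frames/blobs arriving on the camera data channel.
  void HandleMessage(const std::vector<char>& message) override {
    // e.g. forward the bytes to the CVD over vsock
  }
  // "camera_*" control messages routed from the device control channel.
  void HandleMessage(const Json::Value& message) override {
    SendMessage(message);  // reply through whatever sender was registered
  }
};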


@ -0,0 +1,162 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_streamer.h"
#include <android-base/logging.h>
#include <chrono>
#include "common/libs/utils/vsock_connection.h"
namespace cuttlefish {
namespace webrtc_streaming {
CameraStreamer::CameraStreamer(unsigned int port, unsigned int cid)
: cid_(cid), port_(port) {}
CameraStreamer::~CameraStreamer() { Disconnect(); }
// We are getting frames from the client so try forwarding those to the CVD
void CameraStreamer::OnFrame(const webrtc::VideoFrame& client_frame) {
std::lock_guard<std::mutex> lock(onframe_mutex_);
if (!cvd_connection_.IsConnected() && !pending_connection_.valid()) {
// Start new connection
pending_connection_ = cvd_connection_.ConnectAsync(port_, cid_);
return;
} else if (pending_connection_.valid()) {
if (!IsConnectionReady()) {
return;
}
std::lock_guard<std::mutex> lock(settings_mutex_);
if (!cvd_connection_.WriteMessage(settings_buffer_)) {
LOG(ERROR) << "Failed writing camera settings:";
return;
}
StartReadLoop();
LOG(INFO) << "Connected!";
}
auto resolution = resolution_.load();
if (resolution.height <= 0 || resolution.width <= 0) {
// We don't have a valid resolution that is necessary for
// potential frame scaling
return;
}
auto frame = client_frame.video_frame_buffer()->ToI420().get();
if (frame->width() != resolution.width ||
frame->height() != resolution.height) {
// The incoming resolution does not match the resolution we have
// communicated to the CVD - scaling is required
if (!scaled_frame_ || resolution.width != scaled_frame_->width() ||
resolution.height != scaled_frame_->height()) {
scaled_frame_ =
webrtc::I420Buffer::Create(resolution.width, resolution.height);
}
scaled_frame_->CropAndScaleFrom(*frame);
frame = scaled_frame_.get();
}
if (!VsockSendYUVFrame(frame)) {
LOG(ERROR) << "Sending frame over vsock failed";
}
}
// Handle message json coming from client
void CameraStreamer::HandleMessage(const Json::Value& message) {
auto command = message["command"].asString();
if (command == "camera_settings") {
// save local copy of resolution that is required for frame scaling
resolution_ = GetResolutionFromSettings(message);
Json::StreamWriterBuilder factory;
std::string new_settings = Json::writeString(factory, message);
if (!settings_buffer_.empty() && new_settings != settings_buffer_) {
// Settings have changed - disconnect
// Next incoming frames will trigger re-connection
Disconnect();
}
std::lock_guard<std::mutex> lock(settings_mutex_);
settings_buffer_ = new_settings;
LOG(INFO) << "New camera settings received:" << new_settings;
}
}
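// Illustrative control message for the branch above (values are placeholders;
// the keys mirror GetResolutionFromSettings() here and readSettingsFromJson()
// on the camera HAL side):
//   {"command": "camera_settings", "width": 1280, "height": 720, "frame_rate": 30}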
// Handle binary blobs coming from client
void CameraStreamer::HandleMessage(const std::vector<char>& message) {
LOG(INFO) << "Pass through " << message.size() << "bytes";
std::lock_guard<std::mutex> lock(frame_mutex_);
cvd_connection_.WriteMessage(message);
}
CameraStreamer::Resolution CameraStreamer::GetResolutionFromSettings(
const Json::Value& settings) {
return {.width = settings["width"].asInt(),
.height = settings["height"].asInt()};
}
bool CameraStreamer::VsockSendYUVFrame(
const webrtc::I420BufferInterface* frame) {
int32_t size = frame->width() * frame->height() +
2 * frame->ChromaWidth() * frame->ChromaHeight();
const char* y = reinterpret_cast<const char*>(frame->DataY());
const char* u = reinterpret_cast<const char*>(frame->DataU());
const char* v = reinterpret_cast<const char*>(frame->DataV());
auto chroma_width = frame->ChromaWidth();
auto chroma_height = frame->ChromaHeight();
std::lock_guard<std::mutex> lock(frame_mutex_);
return cvd_connection_.Write(size) &&
cvd_connection_.WriteStrides(y, frame->width(), frame->height(),
frame->StrideY()) &&
cvd_connection_.WriteStrides(u, chroma_width, chroma_height,
frame->StrideU()) &&
cvd_connection_.WriteStrides(v, chroma_width, chroma_height,
frame->StrideV());
}
bool CameraStreamer::IsConnectionReady() {
if (!pending_connection_.valid()) {
return cvd_connection_.IsConnected();
} else if (pending_connection_.wait_for(std::chrono::seconds(0)) !=
std::future_status::ready) {
// Still waiting for connection
return false;
} else if (settings_buffer_.empty()) {
// connection is ready but we have not yet received client
// camera settings
return false;
}
return pending_connection_.get();
}
void CameraStreamer::StartReadLoop() {
if (reader_thread_.joinable()) {
reader_thread_.join();
}
reader_thread_ = std::thread([this] {
while (cvd_connection_.IsConnected()) {
auto json_value = cvd_connection_.ReadJsonMessage();
if (!json_value.empty()) {
SendMessage(json_value);
}
}
LOG(INFO) << "Exit reader thread";
});
}
void CameraStreamer::Disconnect() {
cvd_connection_.Disconnect();
if (reader_thread_.joinable()) {
reader_thread_.join();
}
}
} // namespace webrtc_streaming
} // namespace cuttlefish
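A note on the wire format, inferred from VsockSendYUVFrame() above and framesizeMatches() in the HAL-side vsock_frame_provider: each frame goes out as a 32-bit payload size followed by the tightly packed Y, U and V planes, so the receiver accepts a frame exactly when the payload is width*height + 2*(width/2)*(height/2) = 3*width*height/2 bytes; for 1280x720 that is 1280*720*3/2 = 1,382,400 bytes.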


@ -0,0 +1,73 @@
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <api/video/i420_buffer.h>
#include <api/video/video_frame.h>
#include <api/video/video_sink_interface.h>
#include <json/json.h>
#include "common/libs/utils/vsock_connection.h"
#include "host/frontend/webrtc/lib/camera_controller.h"
#include <atomic>
#include <future>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
namespace cuttlefish {
namespace webrtc_streaming {
class CameraStreamer : public rtc::VideoSinkInterface<webrtc::VideoFrame>,
public CameraController {
public:
CameraStreamer(unsigned int port, unsigned int cid);
~CameraStreamer();
CameraStreamer(const CameraStreamer& other) = delete;
CameraStreamer& operator=(const CameraStreamer& other) = delete;
void OnFrame(const webrtc::VideoFrame& frame) override;
void HandleMessage(const Json::Value& message) override;
void HandleMessage(const std::vector<char>& message) override;
private:
using Resolution = struct {
int32_t width;
int32_t height;
};
bool ForwardClientMessage(const Json::Value& message);
Resolution GetResolutionFromSettings(const Json::Value& settings);
bool VsockSendYUVFrame(const webrtc::I420BufferInterface* frame);
bool IsConnectionReady();
void StartReadLoop();
void Disconnect();
std::future<bool> pending_connection_;
VsockClientConnection cvd_connection_;
std::atomic<Resolution> resolution_;
std::mutex settings_mutex_;
std::string settings_buffer_;
std::mutex frame_mutex_;
std::mutex onframe_mutex_;
rtc::scoped_refptr<webrtc::I420Buffer> scaled_frame_;
unsigned int cid_;
unsigned int port_;
std::thread reader_thread_;
};
} // namespace webrtc_streaming
} // namespace cuttlefish
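Because CameraStreamer is a plain rtc::VideoSinkInterface, wiring it up only requires constructing it with the camera vsock port and guest CID and attaching it to the video track received from the client. A minimal sketch of that wiring, assuming a hypothetical AttachCameraSink helper; the real values come from the instance config, as Streamer::AddCamera and SetupCameraForClient do later in this change:

#include <memory>

#include <api/media_stream_interface.h>

#include "host/frontend/webrtc/lib/camera_streamer.h"

// Hypothetical helper: creates a streamer for the given vsock port/CID and
// registers it as a sink so it receives the client's camera frames.
std::unique_ptr<cuttlefish::webrtc_streaming::CameraStreamer> AttachCameraSink(
    webrtc::VideoTrackInterface* track, unsigned int port, unsigned int cid) {
  auto streamer =
      std::make_unique<cuttlefish::webrtc_streaming::CameraStreamer>(port, cid);
  track->AddOrUpdateSink(streamer.get(), rtc::VideoSinkWants());
  return streamer;
}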


@ -39,6 +39,8 @@ namespace {
static constexpr auto kInputChannelLabel = "input-channel";
static constexpr auto kAdbChannelLabel = "adb-channel";
static constexpr auto kBluetoothChannelLabel = "bluetooth-channel";
static constexpr auto kCameraDataChannelLabel = "camera-data-channel";
static constexpr auto kCameraDataEof = "EOF";
class CvdCreateSessionDescriptionObserver
    : public webrtc::CreateSessionDescriptionObserver {
@ -166,6 +168,22 @@ class BluetoothChannelHandler : public webrtc::DataChannelObserver {
  bool channel_open_reported_ = false;
};
class CameraChannelHandler : public webrtc::DataChannelObserver {
public:
CameraChannelHandler(
      rtc::scoped_refptr<webrtc::DataChannelInterface> camera_channel,
std::shared_ptr<ConnectionObserver> observer);
~CameraChannelHandler() override;
void OnStateChange() override;
void OnMessage(const webrtc::DataBuffer &msg) override;
private:
rtc::scoped_refptr<webrtc::DataChannelInterface> camera_channel_;
std::shared_ptr<ConnectionObserver> observer_;
std::vector<char> receive_buffer_;
};
InputChannelHandler::InputChannelHandler(
    rtc::scoped_refptr<webrtc::DataChannelInterface> input_channel,
    std::shared_ptr<ConnectionObserver> observer)
@ -375,22 +393,53 @@ void BluetoothChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
  observer_->OnBluetoothMessage(msg.data.cdata(), msg.size());
}
CameraChannelHandler::CameraChannelHandler(
rtc::scoped_refptr<webrtc::DataChannelInterface> camera_channel,
std::shared_ptr<ConnectionObserver> observer)
: camera_channel_(camera_channel), observer_(observer) {
camera_channel_->RegisterObserver(this);
}
CameraChannelHandler::~CameraChannelHandler() {
camera_channel_->UnregisterObserver();
}
void CameraChannelHandler::OnStateChange() {
LOG(VERBOSE) << "Camera channel state changed to "
<< webrtc::DataChannelInterface::DataStateString(
camera_channel_->state());
}
void CameraChannelHandler::OnMessage(const webrtc::DataBuffer &msg) {
auto msg_data = msg.data.cdata<char>();
if (msg.size() == strlen(kCameraDataEof) &&
!strncmp(msg_data, kCameraDataEof, msg.size())) {
// Send complete buffer to observer on EOF marker
observer_->OnCameraData(receive_buffer_);
receive_buffer_.clear();
return;
}
// Otherwise buffer up data
receive_buffer_.insert(receive_buffer_.end(), msg_data,
msg_data + msg.size());
}
std::shared_ptr<ClientHandler> ClientHandler::Create(
    int client_id, std::shared_ptr<ConnectionObserver> observer,
    std::function<void(const Json::Value &)> send_to_client_cb,
    std::function<void(bool)> on_connection_changed_cb) {
  return std::shared_ptr<ClientHandler>(new ClientHandler(
      client_id, observer, send_to_client_cb, on_connection_changed_cb));
}
ClientHandler::ClientHandler(
    int client_id, std::shared_ptr<ConnectionObserver> observer,
    std::function<void(const Json::Value &)> send_to_client_cb,
    std::function<void(bool)> on_connection_changed_cb)
    : client_id_(client_id),
      observer_(observer),
      send_to_client_(send_to_client_cb),
      on_connection_changed_cb_(on_connection_changed_cb) {}
ClientHandler::~ClientHandler() {
  for (auto &data_channel : data_channels_) {
@ -450,6 +499,17 @@ bool ClientHandler::AddAudio(
  return true;
}
webrtc::VideoTrackInterface *ClientHandler::GetCameraStream() const {
  for (const auto &transceiver : peer_connection_->GetTransceivers()) {
    auto track = transceiver->receiver()->track();
if (track &&
track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
return static_cast<webrtc::VideoTrackInterface *>(track.get());
}
}
return nullptr;
}
void ClientHandler::LogAndReplyError(const std::string &error_msg) const {
  LOG(ERROR) << error_msg;
  Json::Value reply;
@ -607,7 +667,7 @@ void ClientHandler::Close() {
  // will then wait for the callback to return -> deadlock). Destroying the
  // peer_connection_ has the same effect. The only alternative is to postpone
  // that operation until after the callback returns.
  on_connection_changed_cb_(false);
}
void ClientHandler::OnConnectionChange(
@ -625,6 +685,7 @@ void ClientHandler::OnConnectionChange(
        control_handler_->Send(msg, size, binary);
        return true;
      });
      on_connection_changed_cb_(true);
      break;
    case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
      LOG(VERBOSE) << "Client " << client_id_ << ": Connection disconnected";
@ -667,6 +728,9 @@ void ClientHandler::OnDataChannel(
  } else if (label == kBluetoothChannelLabel) {
    bluetooth_handler_.reset(
        new BluetoothChannelHandler(data_channel, observer_));
} else if (label == kCameraDataChannelLabel) {
camera_data_handler_.reset(
new CameraChannelHandler(data_channel, observer_));
  } else {
    LOG(VERBOSE) << "Data channel connected: " << label;
    data_channels_.push_back(data_channel);
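The camera-data-channel carries each captured image as a series of binary chunks terminated by a literal "EOF" text message, which CameraChannelHandler::OnMessage reassembles above before handing the complete blob to the observer. A sketch of the matching sender side, assuming a generic send_binary callback and the 65535-byte chunk size used by the JavaScript client (SendCameraBlob is a hypothetical helper, not part of this change):

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <functional>

constexpr size_t kMaxChunkSize = 65535;  // mirrors MAX_SIZE in the JS client
constexpr char kEofMarker[] = "EOF";     // mirrors kCameraDataEof

// Splits `data` into data-channel sized chunks and appends the EOF marker so
// the receiver knows the blob is complete.
void SendCameraBlob(const char* data, size_t size,
                    const std::function<void(const char*, size_t)>& send_binary) {
  for (size_t offset = 0; offset < size; offset += kMaxChunkSize) {
    send_binary(data + offset, std::min(kMaxChunkSize, size - offset));
  }
  send_binary(kEofMarker, strlen(kEofMarker));
}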


@ -21,6 +21,7 @@
#include <optional>
#include <sstream>
#include <string>
#include <utility>
#include <vector>
#include <json/json.h>
@ -37,6 +38,7 @@ class InputChannelHandler;
class AdbChannelHandler;
class ControlChannelHandler;
class BluetoothChannelHandler;
class CameraChannelHandler;
class ClientHandler : public webrtc::PeerConnectionObserver,
                      public std::enable_shared_from_this<ClientHandler> {
@ -44,7 +46,7 @@ class ClientHandler : public webrtc::PeerConnectionObserver,
  static std::shared_ptr<ClientHandler> Create(
      int client_id, std::shared_ptr<ConnectionObserver> observer,
      std::function<void(const Json::Value&)> send_client_cb,
      std::function<void(bool)> on_connection_changed_cb);
  ~ClientHandler() override;
  bool SetPeerConnection(
@ -56,6 +58,8 @@ class ClientHandler : public webrtc::PeerConnectionObserver,
  bool AddAudio(rtc::scoped_refptr<webrtc::AudioTrackInterface> track,
                const std::string& label);
  webrtc::VideoTrackInterface* GetCameraStream() const;
  void HandleMessage(const Json::Value& client_message);
  // CreateSessionDescriptionObserver implementation
@ -107,7 +111,7 @@ class ClientHandler : public webrtc::PeerConnectionObserver,
  };
  ClientHandler(int client_id, std::shared_ptr<ConnectionObserver> observer,
                std::function<void(const Json::Value&)> send_client_cb,
                std::function<void(bool)> on_connection_changed_cb);
  // Intentionally private, disconnect the client by destroying the object.
  void Close();
@ -118,13 +122,14 @@ class ClientHandler : public webrtc::PeerConnectionObserver,
  State state_ = State::kNew;
  std::shared_ptr<ConnectionObserver> observer_;
  std::function<void(const Json::Value&)> send_to_client_;
  std::function<void(bool)> on_connection_changed_cb_;
  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
  std::vector<rtc::scoped_refptr<webrtc::DataChannelInterface>> data_channels_;
  std::unique_ptr<InputChannelHandler> input_handler_;
  std::unique_ptr<AdbChannelHandler> adb_handler_;
  std::unique_ptr<ControlChannelHandler> control_handler_;
  std::unique_ptr<BluetoothChannelHandler> bluetooth_handler_;
  std::unique_ptr<CameraChannelHandler> camera_data_handler_;
};
} // namespace webrtc_streaming


@ -45,6 +45,7 @@ class ConnectionObserver {
  virtual void OnBluetoothChannelOpen(
      std::function<bool(const uint8_t*, size_t)> bluetooth_message_sender) = 0;
  virtual void OnBluetoothMessage(const uint8_t* msg, size_t size) = 0;
  virtual void OnCameraData(const std::vector<char>& data) = 0;
};
class ConnectionObserverFactory {


@ -34,6 +34,7 @@
#include "host/frontend/webrtc/lib/audio_device.h" #include "host/frontend/webrtc/lib/audio_device.h"
#include "host/frontend/webrtc/lib/audio_track_source_impl.h" #include "host/frontend/webrtc/lib/audio_track_source_impl.h"
#include "host/frontend/webrtc/lib/camera_streamer.h"
#include "host/frontend/webrtc/lib/client_handler.h" #include "host/frontend/webrtc/lib/client_handler.h"
#include "host/frontend/webrtc/lib/port_range_socket_factory.h" #include "host/frontend/webrtc/lib/port_range_socket_factory.h"
#include "host/frontend/webrtc/lib/video_track_source_impl.h" #include "host/frontend/webrtc/lib/video_track_source_impl.h"
@ -141,6 +142,7 @@ class Streamer::Impl : public WsConnectionObserver {
void SendMessageToClient(int client_id, const Json::Value& msg); void SendMessageToClient(int client_id, const Json::Value& msg);
void DestroyClientHandler(int client_id); void DestroyClientHandler(int client_id);
void SetupCameraForClient(int client_id);
// WsObserver // WsObserver
void OnOpen() override; void OnOpen() override;
@ -170,6 +172,7 @@ class Streamer::Impl : public WsConnectionObserver {
std::map<std::string, std::string> hardware_; std::map<std::string, std::string> hardware_;
std::vector<ControlPanelButtonDescriptor> custom_control_panel_buttons_; std::vector<ControlPanelButtonDescriptor> custom_control_panel_buttons_;
std::shared_ptr<AudioDeviceModuleWrapper> audio_device_module_; std::shared_ptr<AudioDeviceModuleWrapper> audio_device_module_;
std::unique_ptr<CameraStreamer> camera_streamer_;
}; };
Streamer::Streamer(std::unique_ptr<Streamer::Impl> impl) Streamer::Streamer(std::unique_ptr<Streamer::Impl> impl)
@ -263,6 +266,11 @@ std::shared_ptr<AudioSource> Streamer::GetAudioSource() {
return impl_->audio_device_module_; return impl_->audio_device_module_;
} }
CameraController* Streamer::AddCamera(unsigned int port, unsigned int cid) {
impl_->camera_streamer_ = std::make_unique<CameraStreamer>(port, cid);
return impl_->camera_streamer_.get();
}
void Streamer::SetHardwareSpec(std::string key, std::string value) { void Streamer::SetHardwareSpec(std::string key, std::string value) {
impl_->hardware_.emplace(key, value); impl_->hardware_.emplace(key, value);
} }
@ -566,7 +574,13 @@ std::shared_ptr<ClientHandler> Streamer::Impl::CreateClientHandler(
[this, client_id](const Json::Value& msg) { [this, client_id](const Json::Value& msg) {
SendMessageToClient(client_id, msg); SendMessageToClient(client_id, msg);
}, },
[this, client_id] { DestroyClientHandler(client_id); }); [this, client_id](bool isOpen) {
if (isOpen) {
SetupCameraForClient(client_id);
} else {
DestroyClientHandler(client_id);
}
});
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
@ -641,5 +655,19 @@ void Streamer::Impl::DestroyClientHandler(int client_id) {
  });
}
void Streamer::Impl::SetupCameraForClient(int client_id) {
if (!camera_streamer_) {
return;
}
auto client_handler = clients_[client_id];
if (client_handler) {
auto camera_track = client_handler->GetCameraStream();
if (camera_track) {
camera_track->AddOrUpdateSink(camera_streamer_.get(),
rtc::VideoSinkWants());
}
}
}
} // namespace webrtc_streaming
} // namespace cuttlefish


@ -28,6 +28,7 @@
#include "host/frontend/webrtc/lib/audio_sink.h" #include "host/frontend/webrtc/lib/audio_sink.h"
#include "host/frontend/webrtc/lib/audio_source.h" #include "host/frontend/webrtc/lib/audio_source.h"
#include "host/frontend/webrtc/lib/camera_controller.h"
#include "host/frontend/webrtc/lib/connection_observer.h" #include "host/frontend/webrtc/lib/connection_observer.h"
#include "host/frontend/webrtc/lib/local_recorder.h" #include "host/frontend/webrtc/lib/local_recorder.h"
#include "host/frontend/webrtc/lib/video_sink.h" #include "host/frontend/webrtc/lib/video_sink.h"
@ -98,6 +99,8 @@ class Streamer {
// stream here. // stream here.
std::shared_ptr<AudioSource> GetAudioSource(); std::shared_ptr<AudioSource> GetAudioSource();
CameraController* AddCamera(unsigned int port, unsigned int cid);
// Add a custom button to the control panel. // Add a custom button to the control panel.
void AddCustomControlPanelButton(const std::string& command, void AddCustomControlPanelButton(const std::string& command,
const std::string& title, const std::string& title,


@ -33,6 +33,7 @@
#include "host/frontend/webrtc/connection_observer.h" #include "host/frontend/webrtc/connection_observer.h"
#include "host/frontend/webrtc/display_handler.h" #include "host/frontend/webrtc/display_handler.h"
#include "host/frontend/webrtc/kernel_log_events_handler.h" #include "host/frontend/webrtc/kernel_log_events_handler.h"
#include "host/frontend/webrtc/lib/camera_controller.h"
#include "host/frontend/webrtc/lib/local_recorder.h" #include "host/frontend/webrtc/lib/local_recorder.h"
#include "host/frontend/webrtc/lib/streamer.h" #include "host/frontend/webrtc/lib/streamer.h"
#include "host/frontend/webrtc/lib/video_sink.h" #include "host/frontend/webrtc/lib/video_sink.h"
@ -57,6 +58,7 @@ DEFINE_string(action_servers, "",
DEFINE_bool(write_virtio_input, true, DEFINE_bool(write_virtio_input, true,
"Whether to send input events in virtio format."); "Whether to send input events in virtio format.");
DEFINE_int32(audio_server_fd, -1, "An fd to listen on for audio frames"); DEFINE_int32(audio_server_fd, -1, "An fd to listen on for audio frames");
DEFINE_int32(camera_streamer_fd, -1, "An fd to send client camera frames");
using cuttlefish::AudioHandler; using cuttlefish::AudioHandler;
using cuttlefish::CfConnectionObserverFactory; using cuttlefish::CfConnectionObserverFactory;
@ -243,6 +245,12 @@ int main(int argc, char** argv) {
auto display_handler = auto display_handler =
std::make_shared<DisplayHandler>(std::move(displays), screen_connector); std::make_shared<DisplayHandler>(std::move(displays), screen_connector);
if (instance.camera_server_port()) {
auto camera_controller = streamer->AddCamera(instance.camera_server_port(),
instance.vsock_guest_cid());
observer_factory->SetCameraHandler(camera_controller);
}
std::unique_ptr<cuttlefish::webrtc_streaming::LocalRecorder> local_recorder; std::unique_ptr<cuttlefish::webrtc_streaming::LocalRecorder> local_recorder;
if (cvd_config->record_screen()) { if (cvd_config->record_screen()) {
int recording_num = 0; int recording_num = 0;


@ -35,6 +35,7 @@
    <div id='app-controls'>
      <div id="keyboard-capture-control" title="Capture Keyboard"></div>
      <div id="mic-capture-control" title="Capture Microphone"></div>
      <div id="camera-control" title="Capture Camera"></div>
      <audio autoplay controls id="device-audio"></audio>
    </div>
    <div id='status-div'>


@ -22,6 +22,8 @@ function ConnectToDevice(device_id) {
  createToggleControl(keyboardCaptureCtrl, "keyboard", onKeyboardCaptureToggle);
  const micCaptureCtrl = document.getElementById('mic-capture-control');
  createToggleControl(micCaptureCtrl, "mic", onMicCaptureToggle);
const cameraCtrl = document.getElementById('camera-control');
createToggleControl(cameraCtrl, "videocam", onVideoCaptureToggle);
  const deviceAudio = document.getElementById('device-audio');
  const deviceDisplays = document.getElementById('device-displays');
@ -126,6 +128,11 @@ function ConnectToDevice(device_id) {
        y_res: metadata.height
      });
    }
if (message_data.event == 'VIRTUAL_DEVICE_CAPTURE_IMAGE') {
if (deviceConnection.cameraEnabled) {
takePhoto();
}
}
  }
  function getTransformRotation(element) {
@ -194,6 +201,20 @@ function ConnectToDevice(device_id) {
    }
  }
function takePhoto() {
const imageCapture = deviceConnection.imageCapture;
if (imageCapture) {
const photoSettings = {
imageWidth: deviceConnection.cameraWidth,
imageHeight: deviceConnection.cameraHeight
}
imageCapture.takePhoto(photoSettings)
.then(blob => blob.arrayBuffer())
.then(buffer => deviceConnection.sendOrQueueCameraData(buffer))
.catch(error => console.log(error));
}
}
  function resizeDeviceDisplays() {
    const deviceDisplayPadding = 10;
@ -512,6 +533,10 @@ function ConnectToDevice(device_id) {
    deviceConnection.useMic(enabled);
  }
function onVideoCaptureToggle(enabled) {
deviceConnection.useVideo(enabled);
}
  function cmdConsole(consoleViewName, consoleInputName) {
    let consoleView = document.getElementById(consoleViewName);


@ -82,12 +82,22 @@ function awaitDataChannel(pc, label, onMessage) {
}
class DeviceConnection {
  constructor(pc, control, media_stream) {
    this._pc = pc;
    this._control = control;
    this._media_stream = media_stream;
    // Disable the microphone by default
    this.useMic(false);
this.useVideo(false);
this._cameraDataChannel = pc.createDataChannel('camera-data-channel');
    this._cameraDataChannel.binaryType = 'arraybuffer';
this._cameraInputQueue = new Array();
var self = this;
this._cameraDataChannel.onbufferedamountlow = () => {
if (self._cameraInputQueue.length > 0) {
self.sendCameraData(self._cameraInputQueue.shift());
}
}
    this._inputChannel = createDataChannel(pc, 'input-channel');
    this._adbChannel = createDataChannel(pc, 'adb-channel', (msg) => {
      if (this._onAdbMessage) {
@ -110,6 +120,7 @@ class DeviceConnection {
        console.error('Received unexpected Bluetooth message');
      }
    });
    this.sendCameraResolution();
    this._streams = {};
    this._streamPromiseResolvers = {};
@ -135,6 +146,28 @@ class DeviceConnection {
    return this._description;
  }
get imageCapture() {
if (this._media_stream) {
const track = this._media_stream.getVideoTracks()[0]
return new ImageCapture(track);
}
return undefined;
}
get cameraWidth() {
return this._x_res;
}
get cameraHeight() {
return this._y_res;
}
get cameraEnabled() {
if (this._media_stream) {
return this._media_stream.getVideoTracks().some(track => track.enabled);
}
}
  getStream(stream_id) {
    return new Promise((resolve, reject) => {
      if (this._streams[stream_id]) {
@ -200,11 +233,53 @@ class DeviceConnection {
  }
  useMic(in_use) {
    if (this._media_stream) {
      this._media_stream.getAudioTracks().forEach(track => track.enabled = in_use);
    }
  }
useVideo(in_use) {
if (this._media_stream) {
this._media_stream.getVideoTracks().forEach(track => track.enabled = in_use);
}
}
sendCameraResolution() {
if (this._media_stream) {
const cameraTracks = this._media_stream.getVideoTracks();
if (cameraTracks.length > 0) {
const settings = cameraTracks[0].getSettings();
this._x_res = settings.width;
this._y_res = settings.height;
this.sendControlMessage(JSON.stringify({
command: 'camera_settings',
width: settings.width,
height: settings.height,
frame_rate: settings.frameRate,
facing: settings.facingMode
}));
}
}
}
sendOrQueueCameraData(data) {
if (this._cameraDataChannel.bufferedAmount > 0 || this._cameraInputQueue.length > 0) {
this._cameraInputQueue.push(data);
} else {
this.sendCameraData(data);
}
}
sendCameraData(data) {
const MAX_SIZE = 65535;
const END_MARKER = 'EOF';
for (let i = 0; i < data.byteLength; i += MAX_SIZE) {
// range is clamped to the valid index range
this._cameraDataChannel.send(data.slice(i, i + MAX_SIZE));
}
this._cameraDataChannel.send(END_MARKER);
}
  // Provide a callback to receive control-related comms from the device
  onControlMessage(cb) {
    this._onControlMessage = cb;
@ -404,21 +479,20 @@ export async function Connect(deviceId, options) {
  }
  let pc = createPeerConnection(infraConfig, control);
  let mediaStream;
  try {
    mediaStream =
        await navigator.mediaDevices.getUserMedia({video: true, audio: true});
    const tracks = mediaStream.getTracks();
    tracks.forEach(track => {
      console.log(`Using ${track.kind} device: ${track.label}`);
      pc.addTrack(track, mediaStream);
    });
  } catch (e) {
    console.error("Failed to open audio device: ", e);
  }
  let deviceConnection = new DeviceConnection(pc, control, mediaStream);
  deviceConnection.description = deviceInfo;
  async function acceptOfferAndReplyAnswer(offer) {
    try {


@ -143,6 +143,13 @@ std::vector<std::string> BootconfigArgsFromConfig(
                                     instance.frames_server_port()));
  }
if (instance.camera_server_port()) {
bootconfig_args.push_back(concat("androidboot.vsock_camera_port=",
instance.camera_server_port()));
bootconfig_args.push_back(
concat("androidboot.vsock_camera_cid=", instance.vsock_guest_cid()));
}
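For the example invocation from the commit message (-camera_server_port=7600), and assuming the commonly used guest CID of 3 for the first instance, the block above yields bootconfig entries of the form:

androidboot.vsock_camera_port=7600
androidboot.vsock_camera_cid=3

The guest camera HAL reads these values to set up its end of the vsock camera connection.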
  if (config.enable_modem_simulator() &&
      instance.modem_simulator_ports() != "") {
    bootconfig_args.push_back(concat("androidboot.modem_simulator_ports=",


@ -370,6 +370,8 @@ class CuttlefishConfig {
  int rootcanal_hci_port() const;
  int rootcanal_link_port() const;
  int rootcanal_test_port() const;
  // Port number to connect to the camera hal on the guest
  int camera_server_port() const;
  std::string rootcanal_config_file() const;
  std::string rootcanal_default_commands_file() const;
@ -485,6 +487,7 @@ class CuttlefishConfig {
  void set_rootcanal_hci_port(int rootcanal_hci_port);
  void set_rootcanal_link_port(int rootcanal_link_port);
  void set_rootcanal_test_port(int rootcanal_test_port);
  void set_camera_server_port(int camera_server_port);
  void set_rootcanal_config_file(const std::string& rootcanal_config_file);
  void set_rootcanal_default_commands_file(
      const std::string& rootcanal_default_commands_file);


@ -389,6 +389,15 @@ void CuttlefishConfig::MutableInstanceSpecific::set_rootcanal_test_port(
  (*Dictionary())[kRootcanalTestPort] = rootcanal_test_port;
}
static constexpr char kCameraServerPort[] = "camera_server_port";
int CuttlefishConfig::InstanceSpecific::camera_server_port() const {
return (*Dictionary())[kCameraServerPort].asInt();
}
void CuttlefishConfig::MutableInstanceSpecific::set_camera_server_port(
int camera_server_port) {
(*Dictionary())[kCameraServerPort] = camera_server_port;
}
static constexpr char kRootcanalConfigFile[] = "rootcanal_config_file";
std::string CuttlefishConfig::InstanceSpecific::rootcanal_config_file() const {
  return (*Dictionary())[kRootcanalConfigFile].asString();


@ -469,11 +469,19 @@ PRODUCT_PACKAGES += $(LOCAL_DUMPSTATE_PRODUCT_PACKAGE)
#
# Camera
#
ifeq ($(TARGET_USE_VSOCK_CAMERA_HAL_IMPL),true)
PRODUCT_PACKAGES += \
    android.hardware.camera.provider@2.7-external-vsock-service \
    android.hardware.camera.provider@2.7-impl-cuttlefish
DEVICE_MANIFEST_FILE += \
    device/google/cuttlefish/guest/hals/camera/manifest.xml
else
PRODUCT_PACKAGES += \
    android.hardware.camera.provider@2.7-service-google \
    libgooglecamerahwl_impl \
    android.hardware.camera.provider@2.7-impl-google \
endif
#
# Gatekeeper
#


@ -62,6 +62,7 @@
/vendor/bin/rename_netiface u:object_r:rename_netiface_exec:s0
/vendor/bin/suspend_blocker u:object_r:suspend_blocker_exec:s0
/vendor/bin/hw/libcuttlefish-rild u:object_r:libcuttlefish_rild_exec:s0
/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-external-vsock-service u:object_r:hal_camera_default_exec:s0
/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google u:object_r:hal_camera_default_exec:s0
/vendor/bin/hw/android\.hardware\.camera\.provider@2\.7-service-google-lazy u:object_r:hal_camera_default_exec:s0
/vendor/bin/hw/android\.hardware\.power\.stats@1\.0-service\.mock u:object_r:hal_power_stats_default_exec:s0


@ -10,3 +10,6 @@ binder_call(sensor_service_server, hal_camera_default)
hal_client_domain(hal_camera_default, hal_thermal)
gpu_access(hal_camera_default)
# Vsocket camera
allow hal_camera_default self:vsock_socket { accept bind create getopt listen read write };