nitrillo / krankygeek Goto Github PK
View Code? Open in Web Editor NEW
Kranky Geek Android Application
License: MIT License
Kranky Geek Android Application
License: MIT License
Hi,
I am new to WebRTC. With the example code, video calling works; but after I updated the libjingle version to 11139, video calling is not happening — the signaling message flow works, but the remote video does not appear. Please help me out.
Can we cut this down to simple dataChannel use?
First, when I ran the app, I got an error saying the camera is needed. So I added the camera permission, and the objects are now initialized in onActivityResult(). That solved the camera problem, but I cannot connect to the other user — the server cannot see the connection.
MainActivity class :
`
// Demo video-call Activity for the Kranky Geek WebRTC sample.
// Built on the old libjingle Android API (PeerConnectionFactory /
// PeerConnection / VideoCapturerAndroid / VideoRendererGui) with a
// socket.io server as the signaling channel (see server.js).
// Flow: onCreate fires an image-capture intent; onActivityResult builds
// the local media pipeline; onConnect (a button onClick handler) creates
// the PeerConnection and wires up the socket.io signaling events.
public class MainActivity extends AppCompatActivity {
// socket.io signaling endpoint (LAN address of the node server).
private static final String SIGNALING_URI = "http://192.168.0.20:7000";
// Fixed IDs for the local tracks/stream handed to the remote peer.
private static final String VIDEO_TRACK_ID = "video1";
private static final String AUDIO_TRACK_ID = "audio1";
private static final String LOCAL_STREAM_ID = "stream1";
// JSON keys shared with the signaling server / remote client.
private static final String SDP_MID = "sdpMid";
private static final String SDP_M_LINE_INDEX = "sdpMLineIndex";
private static final String SDP = "sdp";
// socket.io event names (must match server.js exactly).
private static final String CREATEOFFER = "createoffer";
private static final String OFFER = "offer";
private static final String ANSWER = "answer";
private static final String CANDIDATE = "candidate";
private PeerConnectionFactory peerConnectionFactory;
private VideoSource localVideoSource;
private PeerConnection peerConnection;
private MediaStream localMediaStream;
// Renderer reserved for the remote peer's video track (created in
// onActivityResult, attached in onAddStream).
private VideoRenderer otherPeerRenderer;
private Socket socket;
// True when this side was told (via "createoffer") to initiate the call;
// decides whether onCreateSuccess emits an OFFER or an ANSWER.
// NOTE(review): written from a socket.io callback thread and read from a
// WebRTC observer thread without synchronization — confirm this is safe.
private boolean createOffer = false;
private final int REQUEST_IMAGE_CAPTURE = 1001;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// NOTE(review): the image-capture intent appears to be used only as a
// workaround to get camera access granted before WebRTC opens the
// camera (the real init happens in onActivityResult). The conventional
// approach is a runtime CAMERA permission request — confirm intent.
Intent i = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (i.resolveActivity(getPackageManager()) != null)
{
startActivityForResult(i, REQUEST_IMAGE_CAPTURE);
}
}
//***************************************************************
// Builds the entire local media pipeline once the camera round-trip
// returns OK: audio routing, factory globals, camera capturer, local
// audio/video tracks, the local MediaStream, and the two on-screen
// renderers (full-screen slot for the remote peer, quarter-size
// picture-in-picture for the local preview).
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
if (requestCode == REQUEST_IMAGE_CAPTURE) {
// Route audio for a two-way call (earpiece/speaker, AEC mode).
AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
audioManager.setSpeakerphoneOn(true);
// Must be called once before any factory use on Android.
PeerConnectionFactory.initializeAndroidGlobals(
this, // Context
true, // Audio Enabled
true, // Video Enabled
true, // Hardware Acceleration Enabled
null); // Render EGL Context
peerConnectionFactory = new PeerConnectionFactory();
// Capture from the front camera with default constraints.
VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice());
localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
localVideoTrack.setEnabled(true);
AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
localAudioTrack.setEnabled(true);
// Bundle both tracks into the single stream sent to the peer.
localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
localMediaStream.addTrack(localVideoTrack);
localMediaStream.addTrack(localAudioTrack);
GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
VideoRendererGui.setView(videoView, null);
try {
// (x, y, width, height) are percentages of the GLSurfaceView.
// Remote video fills the view; local preview sits bottom-right.
otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
localVideoTrack.addRenderer(renderer);
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
Toast.makeText(MainActivity.this, "Err", Toast.LENGTH_LONG).show();
}
super.onActivityResult(requestCode, resultCode, data);
}
//***************************************************************
// onClick handler (wired from the layout XML): creates the
// PeerConnection with a single public STUN server, attaches the local
// stream, and registers the four socket.io signaling handlers.
public void onConnect(View button) {
// Guard against double-taps: only one PeerConnection per session.
if (peerConnection != null)
return;
ArrayList<PeerConnection.IceServer> iceServers = new ArrayList<>();
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
peerConnection = peerConnectionFactory.createPeerConnection(iceServers, new MediaConstraints(), peerConnectionObserver);
peerConnection.addStream(localMediaStream);
try {
socket = IO.socket(SIGNALING_URI);
// "createoffer": the server tells this (already-connected) peer to
// start negotiation when a second client joins.
socket.on(CREATEOFFER, new Emitter.Listener() {
@Override
public void call(Object... args) {
createOffer = true;
peerConnection.createOffer(sdpObserver, new MediaConstraints());
}
}).on(OFFER, new Emitter.Listener() {
@Override
public void call(Object... args) {
try {
// Remote peer initiated: apply its offer, then answer.
JSONObject obj = (JSONObject) args[0];
SessionDescription sdp = new SessionDescription(SessionDescription.Type.OFFER,
obj.getString(SDP));
peerConnection.setRemoteDescription(sdpObserver, sdp);
// NOTE(review): createAnswer is issued immediately rather than
// from onSetSuccess; setRemoteDescription is async — confirm the
// ordering is tolerated by this libjingle build.
peerConnection.createAnswer(sdpObserver, new MediaConstraints());
} catch (JSONException e) {
e.printStackTrace();
}
}
}).on(ANSWER, new Emitter.Listener() {
@Override
public void call(Object... args) {
try {
// Offerer side: apply the remote peer's answer.
JSONObject obj = (JSONObject) args[0];
SessionDescription sdp = new SessionDescription(SessionDescription.Type.ANSWER,
obj.getString(SDP));
peerConnection.setRemoteDescription(sdpObserver, sdp);
} catch (JSONException e) {
e.printStackTrace();
}
}
}).on(CANDIDATE, new Emitter.Listener() {
@Override
public void call(Object... args) {
try {
// NOTE(review): candidates arriving before setRemoteDescription
// completes are fed straight to addIceCandidate with no queueing;
// some stacks drop such candidates — verify, this could explain
// "message flow works but no media".
JSONObject obj = (JSONObject) args[0];
peerConnection.addIceCandidate(new IceCandidate(obj.getString(SDP_MID),
obj.getInt(SDP_M_LINE_INDEX),
obj.getString(SDP)));
} catch (JSONException e) {
e.printStackTrace();
}
}
});
socket.connect();
} catch (URISyntaxException e) {
e.printStackTrace();
}
}
// Shared observer for createOffer/createAnswer AND both
// setLocal/RemoteDescription calls (this API reuses one interface).
SdpObserver sdpObserver = new SdpObserver() {
// Fired when a local offer or answer has been created: install it
// locally, then relay the SDP text to the peer via socket.io.
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
peerConnection.setLocalDescription(sdpObserver, sessionDescription);
try {
JSONObject obj = new JSONObject();
obj.put(SDP, sessionDescription.description);
if (createOffer) {
socket.emit(OFFER, obj);
} else {
socket.emit(ANSWER, obj);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onSetSuccess() {
}
// NOTE(review): create/set failures are silently ignored; logging the
// error string here would make negotiation failures visible.
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
};
PeerConnection.Observer peerConnectionObserver = new PeerConnection.Observer() {
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
Log.d("RTCAPP", "onSignalingChange:" + signalingState.toString());
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
Log.d("RTCAPP", "onIceConnectionChange:" + iceConnectionState.toString());
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
}
// Each locally gathered ICE candidate is serialized and relayed to the
// remote peer (trickle ICE).
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
try {
JSONObject obj = new JSONObject();
obj.put(SDP_MID, iceCandidate.sdpMid);
obj.put(SDP_M_LINE_INDEX, iceCandidate.sdpMLineIndex);
obj.put(SDP, iceCandidate.sdp);
socket.emit(CANDIDATE, obj);
} catch (JSONException e) {
e.printStackTrace();
}
}
// Remote stream arrived: attach its first video track to the
// full-screen renderer created in onActivityResult.
// NOTE(review): runs on a WebRTC signaling thread, not the UI thread,
// and getFirst() assumes videoTracks is a non-empty LinkedList — newer
// libjingle builds expose a plain List (no getFirst), which may be why
// remote video stopped appearing after the version update. Verify both.
@Override
public void onAddStream(MediaStream mediaStream) {
mediaStream.videoTracks.getFirst().addRenderer(otherPeerRenderer);
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
}
@Override
public void onDataChannel(DataChannel dataChannel) {
}
@Override
public void onRenegotiationNeeded() {
}
};
}`
server.js file
// Minimal socket.io signaling relay for the WebRTC demo.
// Re-broadcasts 'offer' / 'answer' / 'candidate' messages verbatim to every
// connected client except the sender, and asks the already-connected peer(s)
// to start negotiation whenever a new client joins.
var socketIO = require('socket.io');
var httpServer = require('http').createServer().listen(7000, '192.168.0.20');
var io = socketIO.listen(httpServer);

io.sockets.on('connection', function (client) {
    console.log('new connection: ' + client.id);

    // Relay each signaling message to all other peers, logging a copy.
    client.on('offer', function (details) {
        client.broadcast.emit('offer', details);
        console.log('offer: ' + JSON.stringify(details));
    });
    client.on('answer', function (details) {
        client.broadcast.emit('answer', details);
        console.log('answer: ' + JSON.stringify(details));
    });
    client.on('candidate', function (details) {
        client.broadcast.emit('candidate', details);
        console.log('candidate: ' + JSON.stringify(details));
    });

    // Prompt existing peers to create an offer for the newcomer.
    client.broadcast.emit('createoffer', {});
});
Any hints?
Thank you
I am trying to integrate Android WebRTC with face detection so they share one camera, but I can find no common ground between the two: WebRTC uses "org.webrtc.VideoSource" while face detection uses "com.google.android.gms.vision.CameraSource". How do I eliminate one of the two camera users? The code for WebRTC camera initialization is:
VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfBackFacingDevice(), null);
// VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);
localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
localVideoTrack.setEnabled(true);
AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
localAudioTrack.setEnabled(true);
localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
localMediaStream.addTrack(localVideoTrack);
localMediaStream.addTrack(localAudioTrack);
and code for camera of Google Face detection is:
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedFps(15.0f)
.build();
}
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.