How to record a video using WebRTC

I need to record a video using the laptop camera on my website, which is built with Node.js. For this I am using WebRTC. So far I can take a photo using the laptop camera, but I need to record a video. Could someone help with how the code would go? My current code is as follows:
<video id="video"></video>
<button id="startbutton">Take photo</button>
<button id="pausebutton">Pause</button>
<button id="resumebutton">Resume</button>
<canvas id="canvas"></canvas>
<script type="text/javascript">
(function() {
  var streaming = false,
      video = document.querySelector('#video'),
      canvas = document.querySelector('#canvas'),
      //photo = document.querySelector('#photo'),
      startbutton = document.querySelector('#startbutton'),
      pausebutton = document.querySelector('#pausebutton'),
      resumebutton = document.querySelector('#resumebutton'),
      width = 620,
      height = 50;

  navigator.getMedia = (navigator.getUserMedia ||
                        navigator.webkitGetUserMedia ||
                        navigator.mozGetUserMedia ||
                        navigator.msGetUserMedia);

  navigator.getMedia(
    {
      video: true,
      audio: false
    },
    function(stream) {
      if (navigator.mozGetUserMedia) {
        video.mozSrcObject = stream;
      } else {
        var vendorURL = window.URL || window.webkitURL;
        video.src = vendorURL.createObjectURL(stream);
      }
      video.play();
    },
    function(err) {
      console.log("An error occurred! " + err);
    }
  );

  video.addEventListener('canplay', function(ev) {
    if (!streaming) {
      height = video.videoHeight / (video.videoWidth / width);
      video.setAttribute('width', width);
      video.setAttribute('height', height);
      canvas.setAttribute('width', width);
      canvas.setAttribute('height', height);
      streaming = true;
    }
  }, false);

  function takepicture() {
    canvas.width = width;
    canvas.height = height;
    canvas.getContext('2d').drawImage(video, 0, 0, width, height);
    var data = canvas.toDataURL('image/png');
    // photo.setAttribute('src', data);
  }

  function pausevideo() {
    canvas.width = width;
    canvas.height = height;
    video.pause();
  }

  function resumevideo() {
    canvas.width = width;
    canvas.height = height;
    video.play();
  }

  startbutton.addEventListener('click', function(ev) {
    takepicture();
    ev.preventDefault();
  }, false);

  pausebutton.addEventListener('click', function(ev) {
    pausevideo();
    ev.preventDefault();
  }, false);

  resumebutton.addEventListener('click', function(ev) {
    resumevideo();
    ev.preventDefault();
  }, false);
})();
</script>

I am not going to write code for you (you seem pretty capable if you have gotten this far), but here are some pointers to get you in the right direction.
Assign the stream to a global variable when you grab it (this way you can reuse the same stream in numerous functions).
Once you have the stream you can easily follow the tutorials at RTC-Recording; there is a write-to-disk method that should help you download the recording.
If you have a stream, this is how to start using RecordRTC.
var options = {
  type: 'video'
};
var recordRTC = RecordRTC(mediaStream, options);
recordRTC.startRecording();
recordRTC.stopRecording(function(videoURL) {
  mediaElement.src = videoURL; // plays the recorded blob URL on that src
});
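If you would rather not pull in a library, here is a minimal sketch using the browser's built-in MediaRecorder API (assuming a browser that supports it; recordedChunks and mediaRecorder are illustrative names, and mediaStream is the global you saved in your getUserMedia success callback):

var mediaStream;        // assign this in your getUserMedia success callback
var recordedChunks = [];
var mediaRecorder;

function startRecording() {
  recordedChunks = [];
  mediaRecorder = new MediaRecorder(mediaStream);
  mediaRecorder.ondataavailable = function(e) {
    if (e.data.size > 0) recordedChunks.push(e.data); // collect encoded chunks
  };
  mediaRecorder.start();
}

function stopRecording() {
  mediaRecorder.onstop = function() {
    var blob = new Blob(recordedChunks, { type: 'video/webm' });
    video.src = URL.createObjectURL(blob); // play back the recording
  };
  mediaRecorder.stop();
}

MediaRecorder hands you encoded Blobs via ondataavailable; collecting them and wrapping them in a single Blob gives you a playable recording that you can also upload or offer as a download.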

Related

Fabric JS 2.4.1 ClipPath Crop Not Working with a Dynamically Created rect Mask

I am making an editor using Fabric.js 2.4.1 and have completed all functionality except for dynamic image cropping. The functionality involves drawing a rectangle with the mouse over an image and clicking a crop button.
I have successfully done a proof of concept with a statically created rectangle, but I can't get it to render in my dynamic code. I don't think the problem has to do with the dynamically created rect, but I can't seem to isolate it. It has to be something simple that I'm overlooking, and I think the problem might be in my crop button code.
document.getElementById("crop").addEventListener("click", function() {
  if (target !== null && mask !== null) {
    mask.setCoords();
    target.clipPath = mask; // THIS LINE IS NOT WORKING!!!
    //target.selectable = true;
    target.setCoords();
    console.log(target);
    canvas.renderAll();
    //canvas.remove(mask);
  }
});
Here is a fiddle to the dynamic code that has the problem:
https://jsfiddle.net/Larry_Robertson/mqrv5fnt/
Here is a fiddle to the static code that I gained proof of concept from:
https://jsfiddle.net/Larry_Robertson/f34q67op/
Source code of Dynamic Version:
HTML
<canvas id="c" width="500" height="500" style="border:1px solid #ccc"></canvas>
<button id="crop">Crop</button>
JS
var canvas = new fabric.Canvas('c', {
  selection: true
});
var rect, isDown, origX, origY, done, object, mask, target;
var src = "http://fabricjs.com/lib/pug.jpg";

fabric.Image.fromURL(src, function(img) {
  img.selectable = false;
  img.id = 'image';
  object = img;
  canvas.add(img);
});

canvas.on('object:added', function(e) {
  target = null;
  mask = null;
  canvas.forEachObject(function(obj) {
    //alert(obj.get('id'));
    var id = obj.get('id');
    if (id === 'image') {
      target = obj;
    }
    if (id === 'mask') {
      //alert('mask');
      mask = obj;
    }
  });
});

document.getElementById("crop").addEventListener("click", function() {
  if (target !== null && mask !== null) {
    mask.setCoords();
    target.clipPath = mask; // THIS LINE IS NOT WORKING!!!
    //target.selectable = true;
    target.setCoords();
    console.log(target);
    canvas.renderAll();
    //canvas.remove(mask);
  }
});

canvas.on('mouse:down', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  isDown = true;
  var pointer = canvas.getPointer(o.e);
  origX = pointer.x;
  origY = pointer.y;
  rect = new fabric.Rect({
    left: origX,
    top: origY,
    //originX: 'left',
    //originY: 'top',
    width: pointer.x - origX,
    height: pointer.y - origY,
    //angle: 0,
    fill: 'rgba(255,0,0,0.3)',
    transparentCorners: false,
    //selectable: true,
    id: 'mask'
  });
  canvas.add(rect);
  canvas.renderAll();
});

canvas.on('mouse:move', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  if (!isDown) return;
  var pointer = canvas.getPointer(o.e);
  if (origX > pointer.x) {
    rect.set({
      left: Math.abs(pointer.x)
    });
  }
  if (origY > pointer.y) {
    rect.set({
      top: Math.abs(pointer.y)
    });
  }
  rect.set({
    width: Math.abs(origX - pointer.x)
  });
  rect.set({
    height: Math.abs(origY - pointer.y)
  });
  canvas.renderAll();
});

canvas.on('mouse:up', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  isDown = false;
  //rect.selectable = true;
  rect.set({
    selectable: true
  });
  rect.setCoords();
  canvas.setActiveObject(rect);
  canvas.bringToFront(rect);
  canvas.renderAll();
  //alert(rect);
  rect.setCoords();
  object.clipPath = rect;
  object.selectable = true;
  object.setCoords();
  canvas.renderAll();
  //canvas.remove(rect);
  done = true;
});
You need to set the image's dirty parameter to true, so the object's cache will be re-rendered on the next render call.
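The relevant change, extracted from the full source below, is in the crop button handler:

document.getElementById("crop").addEventListener("click", function() {
  if (target !== null && mask !== null) {
    mask.setCoords();
    target.clipPath = mask;
    target.dirty = true; // invalidate the object's cache so the clip path is applied
    target.setCoords();
    canvas.remove(mask);
    canvas.renderAll();
  }
});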
Here is the fiddle:
https://jsfiddle.net/mqrv5fnt/115/
var canvas = new fabric.Canvas('c', {
  selection: true
});
var rect, isDown, origX, origY, done, object, mask, target;
var src = "http://fabricjs.com/lib/pug.jpg";

fabric.Image.fromURL(src, function(img) {
  img.selectable = false;
  img.id = 'image';
  object = img;
  canvas.add(img);
});

canvas.on('object:added', function(e) {
  target = null;
  mask = null;
  canvas.forEachObject(function(obj) {
    //alert(obj.get('id'));
    var id = obj.get('id');
    if (id === 'image') {
      target = obj;
    }
    if (id === 'mask') {
      //alert('mask');
      mask = obj;
    }
  });
});

document.getElementById("crop").addEventListener("click", function() {
  if (target !== null && mask !== null) {
    mask.setCoords();
    target.clipPath = mask;
    target.dirty = true;
    //target.selectable = true;
    target.setCoords();
    canvas.remove(mask);
    canvas.renderAll();
  }
});

canvas.on('mouse:down', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  isDown = true;
  var pointer = canvas.getPointer(o.e);
  origX = pointer.x;
  origY = pointer.y;
  rect = new fabric.Rect({
    left: origX,
    top: origY,
    //originX: 'left',
    //originY: 'top',
    width: pointer.x - origX,
    height: pointer.y - origY,
    //angle: 0,
    fill: 'rgba(255,0,0,0.3)',
    transparentCorners: false,
    //selectable: true,
    id: 'mask'
  });
  canvas.add(rect);
  canvas.renderAll();
});

canvas.on('mouse:move', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  if (!isDown) return;
  var pointer = canvas.getPointer(o.e);
  if (origX > pointer.x) {
    rect.set({
      left: Math.abs(pointer.x)
    });
  }
  if (origY > pointer.y) {
    rect.set({
      top: Math.abs(pointer.y)
    });
  }
  rect.set({
    width: Math.abs(origX - pointer.x)
  });
  rect.set({
    height: Math.abs(origY - pointer.y)
  });
  canvas.renderAll();
});

canvas.on('mouse:up', function(o) {
  if (done) {
    canvas.renderAll();
    return;
  }
  isDown = false;
  //rect.selectable = true;
  rect.set({
    selectable: true
  });
  rect.setCoords();
  canvas.setActiveObject(rect);
  canvas.bringToFront(rect);
  canvas.renderAll();
  //alert(rect);
  rect.setCoords();
  object.clipPath = rect;
  object.selectable = true;
  object.setCoords();
  canvas.renderAll();
  //canvas.remove(rect);
  done = true;
});

Recorded audio distorted in Safari 11 when using visualization with WebRTC

I've created a demo to show this: it seems that when I show a visualization, the recorded audio gets really crunchy. Is there any way I can fix this? It works in Chrome and Firefox; Safari is the only offender.
<style>
  html, body {
    margin: 0 !important;
    padding: 0 !important;
    overflow: hidden !important;
    width: 100%;
  }
</style>
<title>Audio Recording | RecordRTC</title>
<h1>Simple Audio Recording using RecordRTC</h1>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled>Stop Recording</button>
<label><input id="visualizer" type="checkbox" checked/>Show Visualizer</label>
<hr>
<canvas id="audio-canvas"></canvas>
<div><audio controls autoplay></audio></div>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script>
var audio = document.querySelector('audio');

function captureMicrophone(callback) {
  if (microphone) {
    callback(microphone);
    return;
  }
  if (typeof navigator.mediaDevices === 'undefined' || !navigator.mediaDevices.getUserMedia) {
    alert('This browser does not support the WebRTC getUserMedia API.');
    if (!!navigator.getUserMedia) {
      alert('This browser seems to support the deprecated getUserMedia API.');
    }
  }
  navigator.mediaDevices.getUserMedia({
    audio: {
      echoCancellation: false
    }
  }).then(function(mic) {
    callback(mic);
  }).catch(function(error) {
    alert('Unable to capture your microphone. Please check console logs.');
    console.error(error);
  });
}

function replaceAudio(src) {
  var newAudio = document.createElement('audio');
  newAudio.controls = true;
  if (src) {
    newAudio.src = src;
  }
  var parentNode = audio.parentNode;
  parentNode.innerHTML = '';
  parentNode.appendChild(newAudio);
  audio = newAudio;
}

function stopRecordingCallback() {
  replaceAudio(URL.createObjectURL(recorder.getBlob()));
  btnStartRecording.disabled = false;
  setTimeout(function() {
    if (!audio.paused) return;
    setTimeout(function() {
      if (!audio.paused) return;
      audio.play();
    }, 1000);
    audio.play();
  }, 300);
  audio.play();
  if (microphone) {
    microphone.stop();
    microphone = null;
  }
}

var recorder; // globally accessible
var microphone;
var btnStartRecording = document.getElementById('btn-start-recording');
var btnStopRecording = document.getElementById('btn-stop-recording');
var canvas = document.getElementById('audio-canvas');
var visualizerCheckbox = document.getElementById('visualizer');

btnStartRecording.onclick = function() {
  this.disabled = true;
  this.style.border = '';
  this.style.fontSize = '';
  if (!microphone) {
    captureMicrophone(function(mic) {
      microphone = mic;
      replaceAudio();
      audio.muted = true;
      audio.play();
      btnStartRecording.disabled = false;
      btnStartRecording.style.border = '1px solid red';
      btnStartRecording.style.fontSize = '150%';
      alert('Please click the startRecording button again. The first time we only accessed your microphone; now we will record it.');
    });
    return;
  }
  replaceAudio();
  audio.muted = true;
  audio.play();
  if (visualizerCheckbox.checked) {
    initScope(canvas);
  }
  var options = {
    type: 'audio',
    numberOfAudioChannels: 2,
    checkForInactiveTracks: true,
    bufferSize: 16384,
    sampleRate: 48000
  };
  if (recorder) {
    recorder.destroy();
    recorder = null;
  }
  recorder = RecordRTC(microphone, options);
  recorder.startRecording();
  btnStopRecording.disabled = false;
};

btnStopRecording.onclick = function() {
  this.disabled = true;
  recorder.stopRecording(stopRecordingCallback);
};

function initScope(audioScopeCanvas) {
  if (audioScopeCanvas) {
    var audioContext;
    var analyser;
    var mic;
    var javascriptNode;
    var canvasContext;
    audioContext = new AudioContext();
    analyser = audioContext.createAnalyser();
    mic = audioContext.createMediaStreamSource(microphone);
    javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
    analyser.smoothingTimeConstant = 0.3;
    analyser.fftSize = 1024;
    mic.connect(analyser);
    analyser.connect(javascriptNode);
    javascriptNode.connect(audioContext.destination);
    //canvasContext = $("#canvas")[0].getContext("2d");
    canvasContext = audioScopeCanvas.getContext("2d");
    javascriptNode.onaudioprocess = function () {
      var array = new Uint8Array(analyser.frequencyBinCount);
      analyser.getByteFrequencyData(array);
      var values = 0;
      var length = array.length;
      for (var i = 0; i < length; i++) {
        values += array[i];
      }
      var average = values / length;
      canvasContext.clearRect(0, 0, 60, 130);
      canvasContext.fillStyle = '#00ff00';
      canvasContext.fillRect(0, 130 - average, 25, 130);
    };
  }
}
</script>
You can play with it here:
http://andrew.colchagoff.com/safari-record.html
I'm using RecordRTC.

StreamTrack's readyState is getting changed to ended, just before playing the stream (MediaStream - MediaStreamTrack - WebRTC)

The JSFiddle (https://jsfiddle.net/kalyansai99/mm1b74uy/22/) contains code where the user can toggle between the front and back camera of a mobile device.
On a few phones it works fine (Moto G5 Plus, Moto E3, and so on - Chrome browser). On a few others (Mi Redmi Note 4 - Chrome browser), when I switch to the back camera the stream initially loads with a track whose readyState is "live". But when I am about to play the stream in the video player, the readyState changes to "ended" and a black screen is shown on the video tag.
Not sure what's happening. Any clues?
JSFiddle Code
var player = document.getElementById('player');
var flipBtn = document.getElementById('flipBtn');
var deviceIdMap = {};
var front;
var constraints = {
  audio: false,
  video: {
    frameRate: 1000
  }
};

var gotDevices = function (deviceList) {
  var length = deviceList.length;
  console.log(deviceList);
  for (var i = 0; i < length; i++) {
    var deviceInfo = deviceList[i];
    if (deviceInfo.kind === 'videoinput') {
      if (deviceInfo.label.indexOf('front') !== -1) {
        deviceIdMap.front = deviceInfo.deviceId;
      } else if (deviceInfo.label.indexOf('back') !== -1) {
        deviceIdMap.back = deviceInfo.deviceId;
      }
    }
  }
  if (deviceIdMap.front) {
    constraints.video.deviceId = {exact: deviceIdMap.front};
    front = true;
  } else if (deviceIdMap.back) {
    constraints.video.deviceId = {exact: deviceIdMap.back};
    front = false;
  }
  console.log('deviceIdMap - ', deviceIdMap);
};

var handleError = function (error) {
  console.log('navigator.getUserMedia error: ', error);
};

function handleSuccess(stream) {
  window.stream = stream;
  // this is a video track as there is no audio track
  console.log("Track - ", window.stream.getTracks()[0]);
  console.log('Ready State - ', window.stream.getTracks()[0].readyState);
  if (window.URL) {
    player.src = window.URL.createObjectURL(stream);
  } else {
    player.src = stream;
  }
  player.onloadedmetadata = function (e) {
    console.log('Ready State - 3', window.stream.getTracks()[0].readyState);
    player.play();
    console.log('Ready State - 4', window.stream.getTracks()[0].readyState);
  };
  console.log('Ready State - 2', window.stream.getTracks()[0].readyState);
}

navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);

flipBtn.addEventListener('click', function () {
  if (window.stream) {
    window.stream.getTracks().forEach(function(track) {
      track.stop();
    });
  }
  if (front) {
    constraints.video.deviceId = {exact: deviceIdMap.back};
  } else {
    constraints.video.deviceId = {exact: deviceIdMap.front};
  }
  front = !front;
  navigator.getUserMedia(constraints, handleSuccess, handleError);
}, false);

console.log(constraints);
navigator.getUserMedia(constraints, handleSuccess, handleError);

CSS:
#player {
  width: 320px;
}
#flipBtn {
  width: 150px;
  height: 50px;
}

HTML:
<video id="player" autoplay></video>
<div>
  <button id="flipBtn">
    Flip Camera
  </button>
</div>
Replace track.stop() with track.enabled = false, and when you add the track back to the stream, enable it again with track.enabled = true.
A MediaStreamTrack's readyState property changes to "ended" when you stop the track, and the track can never be used again, so it is not wise to call stop() on a track you intend to reuse. For more reference:
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/readyState
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/stop
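A minimal sketch of that approach, assuming window.stream holds the active stream as in the question's code:

// Instead of stopping the tracks (which permanently ends them) ...
window.stream.getTracks().forEach(function(track) {
  track.enabled = false; // disables output but keeps readyState 'live'
});

// ... re-enable the same tracks when you switch back:
window.stream.getTracks().forEach(function(track) {
  track.enabled = true;
});

Disabling a track makes it produce black frames (or silence, for audio) while keeping it alive, which is exactly what you want when you plan to reuse it.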

Web Audio API record function

I have the following code from http://forestmist.org/blog/web-audio-api-loops/.
It works well, but I need recording functionality that will capture the previously clicked loops and store that audio too. Any help?
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>Web Audio API Loops Demo</title>
</head>
<body>
  <form>
    <button id="button-loop-1" type="button" value="1">Loop 1</button>
    <button id="button-loop-2" type="button" value="2">Loop 2</button>
  </form>
  <script>
    //--------------
    // Audio Object
    //--------------
    var audio = {
      buffer: {},
      compatibility: {},
      files: [
        'synth.wav',
        'beat.wav'
      ],
      proceed: true,
      source_loop: {},
      source_once: {}
    };

    //-----------------
    // Audio Functions
    //-----------------
    audio.findSync = function(n) {
      var first = 0,
          current = 0,
          offset = 0;
      // Find the audio source with the earliest startTime to sync all others to
      for (var i in audio.source_loop) {
        current = audio.source_loop[i]._startTime;
        if (current > 0) {
          if (current < first || first === 0) {
            first = current;
          }
        }
      }
      if (audio.context.currentTime > first) {
        offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
      }
      return offset;
    };

    audio.play = function(n) {
      if (audio.source_loop[n]._playing) {
        audio.stop(n);
      } else {
        audio.source_loop[n] = audio.context.createBufferSource();
        audio.source_loop[n].buffer = audio.buffer[n];
        audio.source_loop[n].loop = true;
        audio.source_loop[n].connect(audio.context.destination);
        var offset = audio.findSync(n);
        audio.source_loop[n]._startTime = audio.context.currentTime;
        if (audio.compatibility.start === 'noteOn') {
          /*
          The deprecated noteOn() function does not support offsets.
          Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
          */
          audio.source_once[n] = audio.context.createBufferSource();
          audio.source_once[n].buffer = audio.buffer[n];
          audio.source_once[n].connect(audio.context.destination);
          audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
          /*
          Note about the third parameter of noteGrainOn().
          If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
          If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
          */
          // Now queue up our looping sound to start immediately after the source_once audio plays.
          audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
        } else {
          audio.source_loop[n][audio.compatibility.start](0, offset);
        }
        audio.source_loop[n]._playing = true;
      }
    };

    audio.stop = function(n) {
      if (audio.source_loop[n]._playing) {
        audio.source_loop[n][audio.compatibility.stop](0);
        audio.source_loop[n]._playing = false;
        audio.source_loop[n]._startTime = 0;
        if (audio.compatibility.start === 'noteOn') {
          audio.source_once[n][audio.compatibility.stop](0);
        }
      }
    };

    //-----------------------------
    // Check Web Audio API Support
    //-----------------------------
    try {
      // More info at http://caniuse.com/#feat=audio-api
      window.AudioContext = window.AudioContext || window.webkitAudioContext;
      audio.context = new window.AudioContext();
    } catch(e) {
      audio.proceed = false;
      alert('Web Audio API not supported in this browser.');
    }

    if (audio.proceed) {
      //---------------
      // Compatibility
      //---------------
      (function() {
        var start = 'start',
            stop = 'stop',
            buffer = audio.context.createBufferSource();
        if (typeof buffer.start !== 'function') {
          start = 'noteOn';
        }
        audio.compatibility.start = start;
        if (typeof buffer.stop !== 'function') {
          stop = 'noteOff';
        }
        audio.compatibility.stop = stop;
      })();

      //-------------------------------
      // Setup Audio Files and Buttons
      //-------------------------------
      for (var a in audio.files) {
        (function() {
          var i = parseInt(a) + 1;
          var req = new XMLHttpRequest();
          req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
          req.responseType = 'arraybuffer';
          req.onload = function() {
            audio.context.decodeAudioData(
              req.response,
              function(buffer) {
                audio.buffer[i] = buffer;
                audio.source_loop[i] = {};
                var button = document.getElementById('button-loop-' + i);
                button.addEventListener('click', function(e) {
                  e.preventDefault();
                  audio.play(this.value);
                });
              },
              function() {
                console.log('Error decoding audio "' + audio.files[i - 1] + '".');
              }
            );
          };
          req.send();
        })();
      }
    }
  </script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
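Alternatively, if you only need to capture what the loops play, one approach (a sketch of an alternative, not the blog's own method) is to route the sources into a MediaStreamAudioDestinationNode and record its stream with MediaRecorder, browser support permitting; recordDest and chunks are illustrative names:

// Create a recordable destination alongside the speakers.
var recordDest = audio.context.createMediaStreamDestination();

// In audio.play(), also connect each source to the record destination:
//   audio.source_loop[n].connect(recordDest);

var chunks = [];
var mediaRecorder = new MediaRecorder(recordDest.stream);
mediaRecorder.ondataavailable = function(e) {
  chunks.push(e.data); // collect encoded audio chunks
};
mediaRecorder.onstop = function() {
  var blob = new Blob(chunks, { type: 'audio/webm' });
  var url = URL.createObjectURL(blob); // use as the src of an <audio> element
};
mediaRecorder.start();  // begin capturing whatever is routed to recordDest
// ... user toggles loops ...
// mediaRecorder.stop(); // call when done to assemble the recording

Anything you connect to recordDest ends up in the recording, so whichever loops the user clicks get captured automatically.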

How to find coordinates from the Google Maps API to display markers from a database on the map

I have a search page in which I display properties from my database as markers on a Google map. Now I want to modify this search page so that a user can draw either a circle or a polygon on the map and get only the properties from our database within the selected area (drawn by the user). I am attaching my code so far.
The circle in the image shows the selected area, and the result must display the properties available in the selected region. My table contains two different fields for the latitude and longitude of each property.
JavaScript code:
<script type="text/javascript">
var drawingManager;
var all_overlays = [];
var selectedShape;
var colors = ['#1E90FF', '#FF1493', '#32CD32', '#FF8C00', '#4B0082'];
var selectedColor;
var colorButtons = {};

function clearSelection() {
  if (selectedShape) {
    selectedShape.setEditable(false);
    selectedShape = null;
  }
}

function setSelection(shape) {
  clearSelection();
  selectedShape = shape;
  shape.setEditable(true);
  selectColor(shape.get('fillColor') || shape.get('strokeColor'));
}

function deleteSelectedShape() {
  if (selectedShape) {
    selectedShape.setMap(null);
  }
}

function deleteAllShape() {
  for (var i = 0; i < all_overlays.length; i++) {
    all_overlays[i].overlay.setMap(null);
  }
  all_overlays = [];
}

function selectColor(color) {
  selectedColor = color;
  for (var i = 0; i < colors.length; ++i) {
    var currColor = colors[i];
    colorButtons[currColor].style.border = currColor == color ? '2px solid #789' : '2px solid #fff';
  }
  // Retrieves the current options from the drawing manager and replaces the
  // stroke or fill color as appropriate.
  var polylineOptions = drawingManager.get('polylineOptions');
  polylineOptions.strokeColor = color;
  drawingManager.set('polylineOptions', polylineOptions);
  var rectangleOptions = drawingManager.get('rectangleOptions');
  rectangleOptions.fillColor = color;
  drawingManager.set('rectangleOptions', rectangleOptions);
  var circleOptions = drawingManager.get('circleOptions');
  circleOptions.fillColor = color;
  drawingManager.set('circleOptions', circleOptions);
  var polygonOptions = drawingManager.get('polygonOptions');
  polygonOptions.fillColor = color;
  drawingManager.set('polygonOptions', polygonOptions);
}

function setSelectedShapeColor(color) {
  if (selectedShape) {
    if (selectedShape.type == google.maps.drawing.OverlayType.POLYLINE) {
      selectedShape.set('strokeColor', color);
    } else {
      selectedShape.set('fillColor', color);
    }
  }
}

function makeColorButton(color) {
  var button = document.createElement('span');
  button.className = 'color-button';
  button.style.backgroundColor = color;
  google.maps.event.addDomListener(button, 'click', function () {
    selectColor(color);
    setSelectedShapeColor(color);
  });
  return button;
}

function buildColorPalette() {
  var colorPalette = document.getElementById('color-palette');
  for (var i = 0; i < colors.length; ++i) {
    var currColor = colors[i];
    var colorButton = makeColorButton(currColor);
    colorPalette.appendChild(colorButton);
    colorButtons[currColor] = colorButton;
  }
  selectColor(colors[0]);
}

function initialize() {
  var map = new google.maps.Map(document.getElementById('map'), {
    zoom: 6,
    center: new google.maps.LatLng(22, 77),
    mapTypeId: google.maps.MapTypeId.HYBRID,
    disableDefaultUI: true,
    zoomControl: true
  });
  var polyOptions = {
    strokeWeight: 0,
    fillOpacity: 0.45,
    editable: true
  };
  // Creates a drawing manager attached to the map that allows the user to draw
  // markers, lines, and shapes.
  drawingManager = new google.maps.drawing.DrawingManager({
    drawingMode: google.maps.drawing.OverlayType.POLYGON,
    markerOptions: {
      draggable: true
    },
    polylineOptions: {
      editable: true
    },
    rectangleOptions: polyOptions,
    circleOptions: polyOptions,
    polygonOptions: polyOptions,
    map: map
  });
  google.maps.event.addListener(drawingManager, 'overlaycomplete', function (e) {
    all_overlays.push(e);
    if (e.type != google.maps.drawing.OverlayType.MARKER) {
      // Switch back to non-drawing mode after drawing a shape.
      drawingManager.setDrawingMode(null);
      // Add an event listener that selects the newly-drawn shape when the user
      // mouses down on it.
      var newShape = e.overlay;
      newShape.type = e.type;
      google.maps.event.addListener(newShape, 'click', function () {
        setSelection(newShape);
      });
      setSelection(newShape);
    }
  });
  // Clear the current selection when the drawing mode is changed, or when the
  // map is clicked.
  google.maps.event.addListener(drawingManager, 'drawingmode_changed', clearSelection);
  google.maps.event.addListener(map, 'click', clearSelection);
  google.maps.event.addDomListener(document.getElementById('delete-button'), 'click', deleteSelectedShape);
  google.maps.event.addDomListener(document.getElementById('delete-all-button'), 'click', deleteAllShape);
  buildColorPalette();
}
google.maps.event.addDomListener(window, 'load', initialize);
</script>
The code above contains the events that handle user interaction on the search page for drawing a circle or polygon to search for properties in the selected region.
The display-property button in the image must show property markers in the selected area.
How would I get the latitude and longitude of the selected circle or polygon area?
How would I query my database for that range of coordinates?
Thank you very much in advance for your kind help.
I suppose you have found a solution since you posted this question.
However, if someone else needs an answer, this is how you can get the lat/lng of a drawn polygon:
google.maps.event.addListener(drawingManager, 'polygoncomplete', function(polygon) {
  var polygonBounds = polygon.getPath();
});
The vertices of your polygon are now in polygonBounds; just iterate over it and do what you want with the lat/lng values.
For a circle, you just need the lat/lng of the center and its radius:
google.maps.event.addListener(drawingManager, 'circlecomplete', function(circle) {
  var radius = circle.getRadius();
  var center = circle.getCenter();
});
Hope it can help someone.
PS: I'm using Google Maps v3.14; I never tested with other versions.
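For illustration, here is a sketch of iterating the polygon path and turning it into a first-pass database query. The table and column names (properties, lat, lng) are hypothetical; adapt them to your schema:

var bounds = new google.maps.LatLngBounds();
polygonBounds.forEach(function(latLng) {
  console.log('vertex:', latLng.lat(), latLng.lng());
  bounds.extend(latLng); // grow a bounding box around the polygon
});
var sw = bounds.getSouthWest();
var ne = bounds.getNorthEast();

// Send sw/ne to the server; a bounding-box filter could look like:
//   SELECT * FROM properties
//   WHERE lat BETWEEN :south AND :north
//     AND lng BETWEEN :west AND :east;

Since a bounding box is larger than the drawn shape, you can refine the matches client-side with the geometry library: google.maps.geometry.poly.containsLocation(latLng, polygon) for polygons, or, for circles, compare google.maps.geometry.spherical.computeDistanceBetween(center, latLng) against the radius.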