yankring_history_v2.txt
A,v
AVRDUDE_USERPORT ?= /dev/tty.usbserial.A901B1I4,V
4,v
2,v
1,v
5,v
int a = 1;,V
-,v
int a = -1;,V
printf("%d" a % 6);,V
cmd(0),V
/dev/ttyUSB2,v
cmd(START),V
SYNC,v
cmd(SYNC),V
cmd(r),V
led.write(chr(170)),V
led.write(chr(r)) led.write(cg) led.write(b),V
(,v
O ,V
user/sdfasd/ asdfasdf.c \ asdfasdf ,V
AVRDUDE_USERPORT ?= /dev/ttyUSB0,V
{,v
#,v
$env{ID_SERIAL_SHORT},v
",v
,,v
$env{ID_SERIAL_SHORT},v
PROGRAM="/etc/udev/rules.d/usb-parse-devpath.py,v
tty.usbserial.%E{ID_SERIAL_SHORT},v
ID_SERIAL_SHORT,v
.,v
slug = slug.encode('ascii', 'ignore').lower(),V
slug= re.sub,V
v1,v
#USERSRC = user/playingField2012/rfmon.c,V
*,v
`,v
,v
s,v
l,v
b,v
u,v
n,v
dd,V
,V
,V
PORTB 10110001 11111,V
0,v
10110000kjk,v
i,v
nt *lastSeen[6]={0};,v
t,v
a,v
S,v
e,v
[,v
6,v
],v
=,v
},v
;,v
gameData.coords[0] = robots[id_a];//mark the best matched robot for(int i = 4; i>-1; i--){,v
C,v
CvMat *projection = 0; // maps from frame coords to physical coordsCvMat *displayMatrix = 0; // maps from physical coords to display coordsCvMat *invProjection = 0; // maps from physical coords to frame coordsint warpDisplay = 0, showFPS = 0;int showPhotoFinish = 0;bool showFilteredSquares = 0;CvPoint2D32f projectionPoints[4];CvPoint2D32f sampleCorners[4];int nextExclude = 0;CvPoint2D32f excludeCorners[10][4];int sampleColors = 0;int thisBoard = 0;char boardLetter = '*';void computeDisplayMatrix() { if (displayMatrix) cvReleaseMat(&displayMatrix); if (warpDisplay) { CvMat *M = cvCreateMat(3,3, CV_32FC1); CvPoint2D32f src[4] = {cvPoint2D32f(X_MIN,Y_MAX),cvPoint2D32f(X_MAX,Y_MAX),cvPoint2D32f(X_MAX,Y_MIN),cvPoint2D32f(X_MIN,Y_MIN)}; //CvPoint2D32f dst[4] = {cvPoint2D32f(0,0),cvPoint2D32f(frameHeight,0),cvPoint2D32f(frameHeight,frameHeight),cvPoint2D32f(0,frameHeight)}; CvPoint2D32f dst[4] = {cvPoint2D32f(0,0),cvPoint2D32f(displayHeight,0),cvPoint2D32f(displayHeight,displayHeight),cvPoint2D32f(0,displayHeight)}; cvGetPerspectiveTransform(src, dst, M); displayMatrix = M; } else displayMatrix = cvCloneMat(invProjection);}void saveExclusions() { CvMat matrix = cvMat(10,4,CV_32FC2,excludeCorners); char buf[256]; sprintf(buf, "Exclusions%c.xml", boardLetter); cvSave( buf, &matrix, 0, 0, cvAttrList(0, 0) );}void mouseHandler(int event, int x, int y, int flags, void *param) { CvPoint2D32f point = cvPoint2D32f(x,y); if (event == CV_EVENT_LBUTTONDOWN && nextMousePoint < 4) { CvPoint2D32f *arr; if (mouseOperation == PICK_PROJECTION_CORNERS) arr = projectionPoints; else if (mouseOperation == PICK_SAMPLE_CORNERS) arr = sampleCorners; else if (mouseOperation == PICK_EXCLUDE_CORNERS) arr = excludeCorners[nextExclude]; arr[nextMousePoint++] = point; if (nextMousePoint == 4) { switch (mouseOperation) { case PICK_PROJECTION_CORNERS: projection_init(&projection, &invProjection, projectionPoints, bounds); computeDisplayMatrix(); CvMat matrix = cvMat(4,1,CV_32FC2,projectionPoints); char buf[256]; sprintf(buf, "Projection%c.xml", boardLetter); cvSave( buf, &matrix, 0, 0, cvAttrList(0, 0) ); printf("project init %s\n", (projection && invProjection) ? 
"succeeded" : "failed"); break; case PICK_SAMPLE_CORNERS: sampleColors = 1; break; case PICK_EXCLUDE_CORNERS: nextExclude++; saveExclusions(); break; } } }}IplImage *filter_image( IplImage *img ) { CvSize sz = cvSize( img->width & -2, img->height & -2 ); //IplImage *timg = cvCloneImage( img ); // make a copy of input image //IplImage *pyr = cvCreateImage( cvSize(sz.width/2, sz.height/2), 8, 3 ); IplImage *tgray; // select the maximum ROI in the image // with the width and height divisible by 2 cvSetImageROI( img, cvRect( 0, 0, sz.width, sz.height )); // down-scale and upscale the image to filter out the noise //cvPyrDown( timg, pyr, 7 ); //cvPyrUp( pyr, timg, 7 ); tgray = cvCreateImage( sz, 8, 1 ); cvCvtColor(img, tgray, CV_BGR2GRAY); //cvReleaseImage( &pyr ); //cvReleaseImage( &timg ); return tgray;}double getObjectDistance(board_coord a, board_coord b){ if (a.id != 0xFF || b.id != 0xFF){ return INFINITY; } return dist_sq(cvPoint(a.x,a.y), cvPoint(b.x,b.y));}typedef struct { int prevIdx; int curIdx; double distance;} obj_dist;int compareDists(const void *a, const void *b){ obj_dist *A = (obj_dist*)a; obj_dist *B = (obj_dist*)b; if (A->distance < B->distance) return -1; else if (A->distance == B->distance) return 0; else return 1;}// returns sequence of squares detected on the image.// the sequence is stored in the shared memory storageCvSeq *findCandidateSquares(IplImage *tgray) { CvSeq *contours; int i; CvSize sz = cvSize( tgray->width & -2, tgray->height & -2 );//makes numbers even IplImage *gray = cvCreateImage( sz, 8, 1 ); CvSeq *result; double s, t; // create empty sequence that will contain points - // 4 points per square (the square's vertices) CvSeq *squares = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvPoint), storage ); cvThreshold( tgray, gray, threshold, 255, CV_THRESH_BINARY ); //cvAdaptiveThreshold(tgray, gray, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY, 13, threshold); if (showFilteredSquares) { cvShowImage(WND_FILTERED_SQUARES, gray); } // find contours and store them all as a list cvFindContours( gray, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) ); // test each contour while( contours ) { // approximate contour with accuracy proportional // to the contour perimeter result = cvApproxPoly( contours, sizeof(CvContour), storage, CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 ); // square contours should have 4 vertices after approximation // relatively large area (to filter out noisy contours) // and be convex. // Note: absolute value of an area is used because // area may be positive or negative - in accordance with the // contour orientation if( result->total == 4 && -cvContourArea(result,CV_WHOLE_SEQ,1) > min_area && -cvContourArea(result,CV_WHOLE_SEQ,1) < max_area && cvCheckContourConvexity(result)) { s = 0; for( i = 2; i < 5; i++ ) { // find minimum angle between joint // edges (maximum of cosine) t = fabs(cosAngle((CvPoint*)cvGetSeqElem( result, i ), (CvPoint*)cvGetSeqElem( result, i-2 ), (CvPoint*)cvGetSeqElem( result, i-1 ))); s = s > t ? s : t; } // if cosines of all angles are small (angles are ~90 degrees) if( s < 0.3 ) { CvPoint pt[4]; for (i=0; i<4; i++) pt[i] = *(CvPoint*)cvGetSeqElem(result, 3-i); // calculate the length of each side double side_len[4]; side_len[0] = dist_sq(pt[0],pt[1]); side_len[1] = dist_sq(pt[1],pt[2]); side_len[2] = dist_sq(pt[2],pt[3]); side_len[3] = dist_sq(pt[3],pt[0]); double tolerance = (double)side_tolerance / 100.; // check to make sure all sides are approx. 
the same length as side 0 if (fabs(side_len[0] - side_len[1])/side_len[0] <= tolerance && fabs(side_len[0] - side_len[2])/side_len[0] <= tolerance && fabs(side_len[0] - side_len[3])/side_len[0] <= tolerance) { // then write quandrange vertices to resultant sequence in clockwise order for( i = 0; i < 4; i++ ) cvSeqPush( squares, &pt[i] ); } } } // take the next contour contours = contours->h_next; } // release all the temporary images cvReleaseImage( &gray ); return squares;}int cvPrintf(IplImage *img, CvFont *font, CvPoint pt, CvScalar color, const char *format, ...) { static char buffer[2048]; va_list ap; int count; va_start(ap, format); count = vsnprintf(buffer, 2048, format, ap); va_end(ap); cvPutText(img, buffer, pt, font, color); return count;}void getBitSamplingTransform(CvPoint pt[4], CvMat **H);void estimateReticleParams(CvMat *H, float *cx, float *cy, float *radius, float *theta) { CvPoint2D32f vf[3]; CvMat m = cvMat(1, 3, CV_32FC2, vf); vf[0] = cvPoint2D32f(2.0, 2.0); vf[1] = cvPoint2D32f(3.0, 2.0); vf[2] = cvPoint2D32f(2.0, 3.0); cvPerspectiveTransform(&m, &m, H); *cx = vf[0].x; *cy = vf[0].y; float a = sqrt((vf[1].x-vf[0].x)*(vf[1].x-vf[0].x) + (vf[1].y-vf[0].y)*(vf[1].y-vf[0].y)); float b = sqrt((vf[2].x-vf[0].x)*(vf[2].x-vf[0].x) + (vf[2].y-vf[0].y)*(vf[2].y-vf[0].y)); *radius = sqrt(a*b)*5; vf[0] = cvPoint2D32f(0.0, 0.0); vf[1] = cvPoint2D32f(cos(*theta), sin(*theta)); cvPerspectiveTransform(&m, &m, displayMatrix); *theta = -atan2(vf[1].y-vf[0].y, vf[1].x-vf[0].x);}void drawChevron(IplImage *out, float theta, float alpha, float dtheta, float t, int big, float cx, float cy, float radius) { CvPoint v[4]; CvPoint2D32f vf[4]; CvMat m = cvMat(1, 4, CV_32FC2, vf); CvPoint *p[1]; int count[1]; if (big) dtheta = acos((1.0/alpha) * (1 + t/(2.0*radius))); else alpha -= t/(2.0*radius); v[0] = cvPoint(8*(cx + radius* cos(theta+dtheta)), 8*(cy - radius* sin(theta+dtheta))); v[1] = cvPoint(8*(cx + radius*alpha*cos(theta )), 8*(cy - radius*alpha*sin(theta ))); v[2] = cvPoint(8*(cx + radius* cos(theta-dtheta)), 8*(cy - radius* sin(theta-dtheta))); v[3] = cvPoint(8*(cx + radius* cos(theta )), 8*(cy - radius* sin(theta ))); p[0] = &v[0]; count[0] = 4; cvFillPoly(out, p, count, 1, CV_RGB(0,255,255), CV_AA, 3);}void drawCallout(IplImage *out, float cx, float cy, float radius, int id) { CvPoint v[4]; CvPoint2D32f vf[4]; CvMat m = cvMat(1, 4, CV_32FC2, vf); CvPoint *p[1]; int count[1]; char buf[256]; sprintf(buf, "Team 00"); CvSize maxTextSize; int baseline; cvGetTextSize(buf, &font, &maxTextSize, &baseline); sprintf(buf, "Team %i", id); CvSize textSize; cvGetTextSize(buf, &font, &textSize, &baseline); float y; float flipY = (warpDisplay ? 
displayHeight : frameHeight) - 40; int down = cy+radius+20+textSize.height < flipY; if (down) y = cy+radius+20+textSize.height; else y = cy-radius-20; cvPrintf(out, &font, cvPoint(cx-textSize.width/2.0, y-baseline), CV_RGB(0,255,255), "Team %i", id); float lx = cx-maxTextSize.width/2-10; v[0] = cvPoint(8*(cx-radius), 8*cy); v[1] = cvPoint(8*lx, 8*cy); v[2] = cvPoint(8*lx, 8*y); v[3] = cvPoint(8*(cx+textSize.width/2), 8*y); p[0] = &v[0]; count[0] = 4; cvPolyLine(out, p, count, 1, 0, CV_RGB(0,255,255), 2, CV_AA, 3);}void drawSquare(IplImage *out, IplImage *gray, CvPoint pt[4], CvPoint2D32f bit_pt_true[16], int id, CvPoint2D32f orientationHandle, float theta) { // draw the square as a closed polyline CvPoint v[20]; CvPoint2D32f vf[20]; CvMat m; CvPoint *p[10]; int count[10]; float l = -1.0, r=5.0, w=4.0; // w is size of circle exclusion zone CvMat *H = cvCreateMat(3,3,CV_32FC1); // tag coords to display coords CvMat *A = 0; // tag coords to frame coords CvMat *B = cvCreateMat(3,3,CV_32FC1); // frame coords to display coords getBitSamplingTransform(pt, &A); cvMatMul(displayMatrix, projection, B); cvMatMul(B, A, H); cvReleaseMat(&B); //black out square area in original grayscale image since it has been processed vf[0] = cvPoint2D32f(l-w, l-w); vf[1] = cvPoint2D32f(r+w, l-w); vf[2] = cvPoint2D32f(r+w, r+w); vf[3] = cvPoint2D32f(l-w, r+w); m = cvMat(1, 4, CV_32FC2, vf); cvPerspectiveTransform(&m, &m, A); for (int i=0; i<4; i++) v[i] = cvPoint(vf[i].x*8, vf[i].y*8); p[0] = v; count[0] = 4; cvFillPoly(gray, p, count, 1, CV_RGB(0,0,0), 8, 3); cvReleaseMat(&A); if (id == -1) { cvReleaseMat(&H); return; } vf[0] = cvPoint2D32f(l, l); vf[1] = cvPoint2D32f(r, l); vf[2] = cvPoint2D32f(r, r); vf[3] = cvPoint2D32f(l, r); m = cvMat(1, 4, CV_32FC2, vf); cvPerspectiveTransform(&m, &m, H); for (int i=0; i<4; i++) v[i] = cvPoint(vf[i].x*8, vf[i].y*8); p[0] = v; count[0] = 4; cvFillPoly(out, p, count, 1, CV_RGB(0,0,0), CV_AA, 3); for (int i=0; i<5; i++) { vf[2*i+ 0] = cvPoint2D32f(i,l); vf[2*i+ 1] = cvPoint2D32f(i,r); vf[2*i+10] = cvPoint2D32f(l,i); vf[2*i+11] = cvPoint2D32f(r,i); p[i+0] = &v[2*i+0]; count[i+0] = 2; p[i+5] = &v[2*i+10]; count[i+5] = 2; } m = cvMat(1, 20, CV_32FC2, vf); cvPerspectiveTransform(&m, &m, H); for (int i=0; i<20; i++) v[i] = cvPoint(vf[i].x*8, vf[i].y*8); cvPolyLine(out, p, count, 10, 0, CV_RGB(0,255,255), 1, CV_AA, 3); float cx, cy, radius; estimateReticleParams(H, &cx, &cy, &radius, &theta); float t = 2.0; cvCircle(out, cvPoint(cx*8,cy*8), radius*8, CV_RGB(0,255,255), t, CV_AA, 3); drawChevron(out, theta, 1.3, 0., t, 1, cx, cy, radius); drawChevron(out, theta+M_PI/4, 0.9, .1, t, 0, cx, cy, radius); drawChevron(out, theta-M_PI/4, 0.9, .1, t, 0, cx, cy, radius); drawChevron(out, theta+3*M_PI/4, 0.9, .1, t, 0, cx, cy, radius); drawChevron(out, theta-3*M_PI/4, 0.9, .1, t, 0, cx, cy, radius); for (int i=0; i<4; i++) drawChevron(out, theta+i*M_PI/2, 0.5, .01, t, 0, cx, cy, radius); cvReleaseMat(&H); drawCallout(out, cx, cy, radius, id);}void getBitSamplingTransform(CvPoint pt[4], CvMat **H) { const float l = -.5, r = 4.5; CvPoint2D32f src[4] = {{l,l},{r,l},{r,r},{l,r}}; CvPoint2D32f dst[4] = {{pt[0].x,pt[0].y},{pt[1].x,pt[1].y},{pt[2].x,pt[2].y},{pt[3].x,pt[3].y}}; *H = cvCreateMat(3,3,CV_32FC1); *H = cvGetPerspectiveTransform(src, dst, *H);}int getOrientationFromBits(int bit_raw[16], int *orientation) { int corner[4]; corner[0] = bit_raw[0]; corner[1] = bit_raw[3]; corner[2] = bit_raw[15]; corner[3] = bit_raw[12]; // registration corner is the white corner whose clockwise neighbor is black 
*orientation = -1; for (int j=0; j<4; j++) { if(corner[j] && !corner[(j+1) % 4]) { if (*orientation != -1) { // corner is ambiguous *orientation = -1; break; } *orientation = j; } } return *orientation != -1;}int getIDFromBits(int bit_true[16], int *id) { *id = (bit_true[5] << 0) + (bit_true[6] << 1) + (bit_true[9] << 2) + (bit_true[10] << 3); /* *id = (bit_true[5] << 0) + (bit_true[6] << 1) + (bit_true[9] << 2) + (bit_true[10] << 3) + (bit_true[1] << 4) + (bit_true[2] << 5) + (bit_true[4] << 6) + (bit_true[7] << 7) + (bit_true[8] << 8) + (bit_true[11] << 9) + (bit_true[13] << 10) + (bit_true[14] << 11) + ((bit_true[12] + bit_true[15]) << 12); */ return 1;}void rotateBitsToOrientation(CvPoint2D32f bit_pt_raw[16], int bit_raw[16], int orientation, CvPoint2D32f bit_pt_true[16], int bit_true[16]) { // shift indices so that orientation -> 0 for (int j=0; j<16; j++) { int x = j%4, y=j/4; int xp, yp; switch (orientation) { case 0: xp = x; yp = y; break; case 1: xp = 3-y; yp = x; break; case 2: xp = 3-x; yp = 3-y; break; case 3: xp = y; yp = 3-x; break; } int j_raw = xp + yp*4; bit_pt_true[j] = bit_pt_raw[j_raw]; bit_true[j] = bit_raw[j_raw]; }}int readPattern(IplImage *img, CvPoint pt[4], CvPoint2D32f bit_pt_true[16], int *id) { CvPoint2D32f bit_pt_raw[16]; int bit_raw[16], bit_true[16]; CvMat *H; getBitSamplingTransform(pt, &H); //calculate the coordinates of each bit for (int j=0; j<16; j++) bit_pt_raw[j] = cvPoint2D32f(.5 + j%4, .5 + j/4); CvMat pts = cvMat(1, 16, CV_32FC2, bit_pt_raw); cvPerspectiveTransform(&pts, &pts, H); for (int j=0; j<16; j++) { CvScalar sample = cvGet2D(img, bit_pt_raw[j].y, bit_pt_raw[j].x); bit_raw[j] = ((sample.val[0] + sample.val[1] + sample.val[2])/3. > threshold); } cvReleaseMat(&H); int num=0; for (int j=0; j<16; j++) num |= bit_raw[j]<<j; int orientation, dist; HAMMING_DECODE(num, id, &orientation, &dist); *id += 1; // 1 to 32 //printf("%5d %2d %1d %1d\n", num, *id, orientation, dist); if (dist>3) return 0; rotateBitsToOrientation(bit_pt_raw, bit_raw, orientation, bit_pt_true, bit_true); return 1;}void getCenterFromBits(CvPoint2D32f bit_pt_true[16], CvPoint2D32f *trueCenter) { CvPoint2D32f rawCenter = cvPoint2D32f((bit_pt_true[0].x + bit_pt_true[3].x + bit_pt_true[15].x + bit_pt_true[12].x)/4, (bit_pt_true[0].y + bit_pt_true[3].y + bit_pt_true[15].y + bit_pt_true[12].y)/4); *trueCenter = project(projection, rawCenter);}float getThetaFromAffine(CvPoint2D32f bit_pt_true[16]) { const float l = 0.5, r = 3.5; CvPoint2D32f src[4] = {{l,l},{r,l},{r,r},{l,r}}; CvPoint2D32f dst[4] = {bit_pt_true[0],bit_pt_true[3],bit_pt_true[15],bit_pt_true[12]}; CvMat *A = cvCreateMat(2,3,CV_32FC1); CvMat srcM = cvMat(1, 4, CV_32FC2, src); CvMat dstM = cvMat(1, 4, CV_32FC2, dst); cvEstimateRigidTransform(&srcM, &dstM, A, 1); // use affine approximation and SVD to determine angle float A22_mat[2][2] = {{cvmGet(A, 0, 0),cvmGet(A, 0, 1)},{cvmGet(A, 1, 0),cvmGet(A, 1, 1)}}; CvMat A22 = cvMat(2,2,CV_32FC1,A22_mat); float U_mat[2][2], W_mat[2][2], V_mat[2][2], R_mat[2][2]; CvMat U = cvMat(2,2,CV_32FC1,U_mat); CvMat W = cvMat(2,2,CV_32FC1,W_mat); CvMat V = cvMat(2,2,CV_32FC1,V_mat); CvMat R = cvMat(2,2,CV_32FC1,R_mat); cvSVD(&A22, &W, &U, &V, CV_SVD_U_T|CV_SVD_V_T); // A = U D V^T cvTranspose(&U, &U); cvMatMulAdd(&U, &V, 0, &R); float theta = atan2(R_mat[1][0], R_mat[0][0]); cvReleaseMat(&A); return theta;}float getThetaFromExtension(CvPoint2D32f bit_pt_true[16], CvPoint2D32f trueCenter) { //to find the heading, "extend" the top and bottom edges 4x to the right and take // the average 
endpoint, then project this and take the dx and dy in the projected // space to find the angle it makes fiducial_t fiducial; fiducial.corners[0] = bit_pt_true[0]; fiducial.corners[1] = bit_pt_true[3]; fiducial.corners[2] = bit_pt_true[15]; fiducial.corners[3] = bit_pt_true[12]; int extended_top_x = fiducial.corners[TR].x + (fiducial.corners[TR].x - fiducial.corners[TL].x)*4; int extended_top_y = fiducial.corners[TR].y + (fiducial.corners[TR].y - fiducial.corners[TL].y)*4; int extended_bottom_x = fiducial.corners[BR].x + (fiducial.corners[BR].x - fiducial.corners[BL].x)*4; int extended_bottom_y = fiducial.corners[BR].y + (fiducial.corners[BR].y - fiducial.corners[BL].y)*4; int extended_avg_x = (extended_top_x+extended_bottom_x)/2; int extended_avg_y = (extended_top_y+extended_bottom_y)/2; //project into coordinate space CvPoint2D32f projected_extension = project(projection, cvPoint2D32f(extended_avg_x,extended_avg_y)); //find the dx and dy with respect to the fiducial's center point float dx = ((float)projected_extension.x-(float)trueCenter.x); float dy = ((float)projected_extension.y-(float)trueCenter.y); return atan2(dy,dx);}void processRobotDetection(CvPoint2D32f trueCenter, float theta, int id, CvPoint2D32f *orientationHandle) { *orientationHandle = cvPoint2D32f(trueCenter.x + FOOT*cos(theta), trueCenter.y + FOOT*sin(theta)); *orientationHandle = project(invProjection, *orientationHandle); sightings[id]+=2; int x = clamp(trueCenter.x, X_MIN, X_MAX); int y = clamp(trueCenter.y, Y_MIN, Y_MAX); int t = theta / CV_PI * 2048; //store robot coordinates robots[id].id = id; robots[id].x = x; robots[id].y = y; robots[id].theta = t; //change theta from +/- PI to +/-2048 (signed 12 bit int)//if (0)// printf("X: %04i, Y: %04i, theta: %04i, theta_act: %f, proj_x:%f, proj_y:%f \n", x, y, t, theta, orientationHandle->x, orientationHandle->y);},V
#include "vision_mock1.h"#include "table.h"#include <strings.h>#include <stdlib.h>#include <stdbool.h>double matchStartTime;int matchState = MATCH_ENDED;,V
targetStarted = 0; } else if ( c == '!' ) { matchStartTime = timeNow()-MATCH_LEN_SECONDS; pthread_mutex_lock(&serial_lock); sendStartPacket = 0; sendStopPacket = 10; matchState = MATCH_ENDED; pthread_mutex_unlock(&serial_lock);,V
if (sidebar) cvReleaseImage(&sidebar);,v
if (sidebar) cvReleaseImage(&sidebar);,v
),v
printf("6");,V
printf("4");,V
printf("2");,V
/,v
g,v
m,v
I,v
d,v
h,v
c,v
_,v
r,v
L,v
o,v
,,v
+,v
f,v
x,v
y,v
8,v
p,v
K,v
W,v
v,v
&,v
R,v
>,v
w,v
cvResetImageROI(frame1);,v
,v