diff --git a/.gitbook/assets/1546190477979.png b/.gitbook/assets/1546190477979.png new file mode 100644 index 0000000..67f0a2b --- /dev/null +++ b/.gitbook/assets/1546190477979.png Binary files differ diff --git a/.gitbook/assets/1546190517286.png b/.gitbook/assets/1546190517286.png new file mode 100644 index 0000000..e9b9495 --- /dev/null +++ b/.gitbook/assets/1546190517286.png Binary files differ diff --git a/.gitbook/assets/1546190552329.png b/.gitbook/assets/1546190552329.png new file mode 100644 index 0000000..2976b26 --- /dev/null +++ b/.gitbook/assets/1546190552329.png Binary files differ diff --git a/.gitbook/assets/1546190828503.png b/.gitbook/assets/1546190828503.png new file mode 100644 index 0000000..2499866 --- /dev/null +++ b/.gitbook/assets/1546190828503.png Binary files differ diff --git a/.gitbook/assets/1546190853493.png b/.gitbook/assets/1546190853493.png new file mode 100644 index 0000000..0fb37bc --- /dev/null +++ b/.gitbook/assets/1546190853493.png Binary files differ diff --git a/.gitbook/assets/1546190996920.png b/.gitbook/assets/1546190996920.png new file mode 100644 index 0000000..5df5f0b --- /dev/null +++ b/.gitbook/assets/1546190996920.png Binary files differ diff --git a/.gitbook/assets/1546191110511.png b/.gitbook/assets/1546191110511.png new file mode 100644 index 0000000..a71ced6 --- /dev/null +++ b/.gitbook/assets/1546191110511.png Binary files differ diff --git a/.gitbook/assets/1546191222687.png b/.gitbook/assets/1546191222687.png new file mode 100644 index 0000000..9768583 --- /dev/null +++ b/.gitbook/assets/1546191222687.png Binary files differ diff --git a/.gitbook/assets/1546191243862.png b/.gitbook/assets/1546191243862.png new file mode 100644 index 0000000..39200be --- /dev/null +++ b/.gitbook/assets/1546191243862.png Binary files differ diff --git a/.gitbook/assets/1546192242493.png b/.gitbook/assets/1546192242493.png new file mode 100644 index 0000000..22754b1 --- /dev/null +++ b/.gitbook/assets/1546192242493.png Binary 
files differ diff --git a/.gitbook/assets/1546192300405.png b/.gitbook/assets/1546192300405.png new file mode 100644 index 0000000..136370e --- /dev/null +++ b/.gitbook/assets/1546192300405.png Binary files differ diff --git a/.gitbook/assets/1546192397305.png b/.gitbook/assets/1546192397305.png new file mode 100644 index 0000000..2ecb8f9 --- /dev/null +++ b/.gitbook/assets/1546192397305.png Binary files differ diff --git a/.gitbook/assets/1546192501079.png b/.gitbook/assets/1546192501079.png new file mode 100644 index 0000000..e0e7810 --- /dev/null +++ b/.gitbook/assets/1546192501079.png Binary files differ diff --git a/.gitbook/assets/1546193899530.png b/.gitbook/assets/1546193899530.png new file mode 100644 index 0000000..e2e9888 --- /dev/null +++ b/.gitbook/assets/1546193899530.png Binary files differ diff --git a/.gitbook/assets/1546193952850.png b/.gitbook/assets/1546193952850.png new file mode 100644 index 0000000..df84fc8 --- /dev/null +++ b/.gitbook/assets/1546193952850.png Binary files differ diff --git a/.gitbook/assets/1546193969018.png b/.gitbook/assets/1546193969018.png new file mode 100644 index 0000000..e8e06da --- /dev/null +++ b/.gitbook/assets/1546193969018.png Binary files differ diff --git a/.gitbook/assets/1546193984129.png b/.gitbook/assets/1546193984129.png new file mode 100644 index 0000000..855bb6b --- /dev/null +++ b/.gitbook/assets/1546193984129.png Binary files differ diff --git a/.gitbook/assets/1546193989361.png b/.gitbook/assets/1546193989361.png new file mode 100644 index 0000000..9f5249c --- /dev/null +++ b/.gitbook/assets/1546193989361.png Binary files differ diff --git a/.gitbook/assets/1546195833078.png b/.gitbook/assets/1546195833078.png new file mode 100644 index 0000000..4794695 --- /dev/null +++ b/.gitbook/assets/1546195833078.png Binary files differ diff --git a/.gitbook/assets/1546195904221.png b/.gitbook/assets/1546195904221.png new file mode 100644 index 0000000..f64a317 --- /dev/null +++ 
b/.gitbook/assets/1546195904221.png Binary files differ diff --git a/.gitbook/assets/1546196212158.png b/.gitbook/assets/1546196212158.png new file mode 100644 index 0000000..06d20c4 --- /dev/null +++ b/.gitbook/assets/1546196212158.png Binary files differ diff --git a/.gitbook/assets/1546196782206.png b/.gitbook/assets/1546196782206.png new file mode 100644 index 0000000..440299b --- /dev/null +++ b/.gitbook/assets/1546196782206.png Binary files differ diff --git a/.gitbook/assets/1546196799209.png b/.gitbook/assets/1546196799209.png new file mode 100644 index 0000000..220e7a3 --- /dev/null +++ b/.gitbook/assets/1546196799209.png Binary files differ diff --git a/.gitbook/assets/1546200410450.png b/.gitbook/assets/1546200410450.png new file mode 100644 index 0000000..c3e0a3c --- /dev/null +++ b/.gitbook/assets/1546200410450.png Binary files differ diff --git a/.gitbook/assets/1546200430877.png b/.gitbook/assets/1546200430877.png new file mode 100644 index 0000000..29f454e --- /dev/null +++ b/.gitbook/assets/1546200430877.png Binary files differ diff --git a/.gitbook/assets/1546200467850.png b/.gitbook/assets/1546200467850.png new file mode 100644 index 0000000..818539a --- /dev/null +++ b/.gitbook/assets/1546200467850.png Binary files differ diff --git a/.gitbook/assets/1546200547561.png b/.gitbook/assets/1546200547561.png new file mode 100644 index 0000000..bd731da --- /dev/null +++ b/.gitbook/assets/1546200547561.png Binary files differ diff --git a/.gitbook/assets/1546200571261.png b/.gitbook/assets/1546200571261.png new file mode 100644 index 0000000..d9b2c85 --- /dev/null +++ b/.gitbook/assets/1546200571261.png Binary files differ diff --git a/.gitbook/assets/1546200603529.png b/.gitbook/assets/1546200603529.png new file mode 100644 index 0000000..ea23830 --- /dev/null +++ b/.gitbook/assets/1546200603529.png Binary files differ diff --git a/.gitbook/assets/1546200682136.png b/.gitbook/assets/1546200682136.png new file mode 100644 index 0000000..cf7e560 --- 
/dev/null +++ b/.gitbook/assets/1546200682136.png Binary files differ diff --git a/.gitbook/assets/1546201453749.png b/.gitbook/assets/1546201453749.png new file mode 100644 index 0000000..47eac1c --- /dev/null +++ b/.gitbook/assets/1546201453749.png Binary files differ diff --git a/.gitbook/assets/1546201460892.png b/.gitbook/assets/1546201460892.png new file mode 100644 index 0000000..bcb89ee --- /dev/null +++ b/.gitbook/assets/1546201460892.png Binary files differ diff --git a/.gitbook/assets/1546201707049.png b/.gitbook/assets/1546201707049.png new file mode 100644 index 0000000..739e194 --- /dev/null +++ b/.gitbook/assets/1546201707049.png Binary files differ diff --git a/.gitbook/assets/1546201717793.png b/.gitbook/assets/1546201717793.png new file mode 100644 index 0000000..74b9cbe --- /dev/null +++ b/.gitbook/assets/1546201717793.png Binary files differ diff --git a/.gitbook/assets/1546201910730.png b/.gitbook/assets/1546201910730.png new file mode 100644 index 0000000..55afeae --- /dev/null +++ b/.gitbook/assets/1546201910730.png Binary files differ diff --git a/.gitbook/assets/aibo.jpg b/.gitbook/assets/aibo.jpg new file mode 100644 index 0000000..f163bff --- /dev/null +++ b/.gitbook/assets/aibo.jpg Binary files differ diff --git a/.gitbook/assets/aibo2.png b/.gitbook/assets/aibo2.png new file mode 100644 index 0000000..a94af63 --- /dev/null +++ b/.gitbook/assets/aibo2.png Binary files differ diff --git a/.gitbook/assets/anydrive.jpg b/.gitbook/assets/anydrive.jpg new file mode 100644 index 0000000..bb86497 --- /dev/null +++ b/.gitbook/assets/anydrive.jpg Binary files differ diff --git a/.gitbook/assets/anymal.PNG b/.gitbook/assets/anymal.PNG new file mode 100644 index 0000000..a878756 --- /dev/null +++ b/.gitbook/assets/anymal.PNG Binary files differ diff --git a/.gitbook/assets/anymal2.png b/.gitbook/assets/anymal2.png new file mode 100644 index 0000000..56c5238 --- /dev/null +++ b/.gitbook/assets/anymal2.png Binary files differ diff --git 
a/.gitbook/assets/asimo.jpg b/.gitbook/assets/asimo.jpg new file mode 100644 index 0000000..a8c8ae2 --- /dev/null +++ b/.gitbook/assets/asimo.jpg Binary files differ diff --git a/.gitbook/assets/asimo_history.jpg b/.gitbook/assets/asimo_history.jpg new file mode 100644 index 0000000..3a017a7 --- /dev/null +++ b/.gitbook/assets/asimo_history.jpg Binary files differ diff --git a/.gitbook/assets/atlas.jpg b/.gitbook/assets/atlas.jpg new file mode 100644 index 0000000..43e8701 --- /dev/null +++ b/.gitbook/assets/atlas.jpg Binary files differ diff --git a/.gitbook/assets/bfi.jpeg b/.gitbook/assets/bfi.jpeg new file mode 100644 index 0000000..232b386 --- /dev/null +++ b/.gitbook/assets/bfi.jpeg Binary files differ diff --git a/.gitbook/assets/cassie.jpg b/.gitbook/assets/cassie.jpg new file mode 100644 index 0000000..d6fac02 --- /dev/null +++ b/.gitbook/assets/cassie.jpg Binary files differ diff --git a/.gitbook/assets/cassie_walking.jpeg b/.gitbook/assets/cassie_walking.jpeg new file mode 100644 index 0000000..94fb9a4 --- /dev/null +++ b/.gitbook/assets/cassie_walking.jpeg Binary files differ diff --git a/.gitbook/assets/e2-dr_1.png b/.gitbook/assets/e2-dr_1.png new file mode 100644 index 0000000..95d3a78 --- /dev/null +++ b/.gitbook/assets/e2-dr_1.png Binary files differ diff --git a/.gitbook/assets/e2-dr_2.jpeg b/.gitbook/assets/e2-dr_2.jpeg new file mode 100644 index 0000000..73de4fd --- /dev/null +++ b/.gitbook/assets/e2-dr_2.jpeg Binary files differ diff --git a/.gitbook/assets/e2-dr_3.jpeg b/.gitbook/assets/e2-dr_3.jpeg new file mode 100644 index 0000000..915b916 --- /dev/null +++ b/.gitbook/assets/e2-dr_3.jpeg Binary files differ diff --git a/.gitbook/assets/epson_scara-g1.jpg b/.gitbook/assets/epson_scara-g1.jpg new file mode 100644 index 0000000..d2c0be6 --- /dev/null +++ b/.gitbook/assets/epson_scara-g1.jpg Binary files differ diff --git a/.gitbook/assets/epson_scara-g10.jpg b/.gitbook/assets/epson_scara-g10.jpg new file mode 100644 index 0000000..9b89473 --- 
/dev/null +++ b/.gitbook/assets/epson_scara-g10.jpg Binary files differ diff --git a/.gitbook/assets/epson_scara-g20.jpg b/.gitbook/assets/epson_scara-g20.jpg new file mode 100644 index 0000000..2bdac50 --- /dev/null +++ b/.gitbook/assets/epson_scara-g20.jpg Binary files differ diff --git a/.gitbook/assets/epson_scara-g3.jpg b/.gitbook/assets/epson_scara-g3.jpg new file mode 100644 index 0000000..764483c --- /dev/null +++ b/.gitbook/assets/epson_scara-g3.jpg Binary files differ diff --git a/.gitbook/assets/epson_scara-g6.jpg b/.gitbook/assets/epson_scara-g6.jpg new file mode 100644 index 0000000..7faf993 --- /dev/null +++ b/.gitbook/assets/epson_scara-g6.jpg Binary files differ diff --git a/.gitbook/assets/examplev2.png b/.gitbook/assets/examplev2.png new file mode 100644 index 0000000..ddbb242 --- /dev/null +++ b/.gitbook/assets/examplev2.png Binary files differ diff --git a/.gitbook/assets/fetch.png b/.gitbook/assets/fetch.png new file mode 100644 index 0000000..68c2201 --- /dev/null +++ b/.gitbook/assets/fetch.png Binary files differ diff --git a/.gitbook/assets/freight_base.png b/.gitbook/assets/freight_base.png new file mode 100644 index 0000000..396c938 --- /dev/null +++ b/.gitbook/assets/freight_base.png Binary files differ diff --git a/.gitbook/assets/fsj1.png b/.gitbook/assets/fsj1.png new file mode 100644 index 0000000..e926d28 --- /dev/null +++ b/.gitbook/assets/fsj1.png Binary files differ diff --git a/.gitbook/assets/fsj2.png b/.gitbook/assets/fsj2.png new file mode 100644 index 0000000..8bdfe1e --- /dev/null +++ b/.gitbook/assets/fsj2.png Binary files differ diff --git a/.gitbook/assets/fsj3.png b/.gitbook/assets/fsj3.png new file mode 100644 index 0000000..e920a5f --- /dev/null +++ b/.gitbook/assets/fsj3.png Binary files differ diff --git a/.gitbook/assets/fsj4.png b/.gitbook/assets/fsj4.png new file mode 100644 index 0000000..6c330ee --- /dev/null +++ b/.gitbook/assets/fsj4.png Binary files differ diff --git a/.gitbook/assets/fusion.jpeg 
b/.gitbook/assets/fusion.jpeg new file mode 100644 index 0000000..e06d8a7 --- /dev/null +++ b/.gitbook/assets/fusion.jpeg Binary files differ diff --git a/.gitbook/assets/gila_monster.jpg b/.gitbook/assets/gila_monster.jpg new file mode 100644 index 0000000..b0707b8 --- /dev/null +++ b/.gitbook/assets/gila_monster.jpg Binary files differ diff --git a/.gitbook/assets/hardnet.png b/.gitbook/assets/hardnet.png new file mode 100644 index 0000000..3d4a3ce --- /dev/null +++ b/.gitbook/assets/hardnet.png Binary files differ diff --git a/.gitbook/assets/hermes.jpeg b/.gitbook/assets/hermes.jpeg new file mode 100644 index 0000000..20836e3 --- /dev/null +++ b/.gitbook/assets/hermes.jpeg Binary files differ diff --git a/.gitbook/assets/l2net.png b/.gitbook/assets/l2net.png new file mode 100644 index 0000000..067692a --- /dev/null +++ b/.gitbook/assets/l2net.png Binary files differ diff --git a/.gitbook/assets/laikago.png b/.gitbook/assets/laikago.png new file mode 100644 index 0000000..bead018 --- /dev/null +++ b/.gitbook/assets/laikago.png Binary files differ diff --git a/.gitbook/assets/laikago_spec.png b/.gitbook/assets/laikago_spec.png new file mode 100644 index 0000000..1a06795 --- /dev/null +++ b/.gitbook/assets/laikago_spec.png Binary files differ diff --git a/.gitbook/assets/laikagopushrecovery.png b/.gitbook/assets/laikagopushrecovery.png new file mode 100644 index 0000000..88210fa --- /dev/null +++ b/.gitbook/assets/laikagopushrecovery.png Binary files differ diff --git a/.gitbook/assets/mitcheetah1.jpeg b/.gitbook/assets/mitcheetah1.jpeg new file mode 100644 index 0000000..384f5bd --- /dev/null +++ b/.gitbook/assets/mitcheetah1.jpeg Binary files differ diff --git a/.gitbook/assets/mitcheetah1spec.jpeg b/.gitbook/assets/mitcheetah1spec.jpeg new file mode 100644 index 0000000..669ae97 --- /dev/null +++ b/.gitbook/assets/mitcheetah1spec.jpeg Binary files differ diff --git a/.gitbook/assets/mitcheetah2.png b/.gitbook/assets/mitcheetah2.png new file mode 100644 index 
0000000..65cf871 --- /dev/null +++ b/.gitbook/assets/mitcheetah2.png Binary files differ diff --git a/.gitbook/assets/mitcheetah3.jpg b/.gitbook/assets/mitcheetah3.jpg new file mode 100644 index 0000000..d2534f6 --- /dev/null +++ b/.gitbook/assets/mitcheetah3.jpg Binary files differ diff --git a/.gitbook/assets/mitcheetah3cpg.png b/.gitbook/assets/mitcheetah3cpg.png new file mode 100644 index 0000000..0159876 --- /dev/null +++ b/.gitbook/assets/mitcheetah3cpg.png Binary files differ diff --git a/.gitbook/assets/ocean_one.jpg b/.gitbook/assets/ocean_one.jpg new file mode 100644 index 0000000..186f9bd --- /dev/null +++ b/.gitbook/assets/ocean_one.jpg Binary files differ diff --git a/.gitbook/assets/ocean_one_2.png b/.gitbook/assets/ocean_one_2.png new file mode 100644 index 0000000..424e325 --- /dev/null +++ b/.gitbook/assets/ocean_one_2.png Binary files differ diff --git a/.gitbook/assets/ppfnet.png b/.gitbook/assets/ppfnet.png new file mode 100644 index 0000000..2b76752 --- /dev/null +++ b/.gitbook/assets/ppfnet.png Binary files differ diff --git a/.gitbook/assets/robonaut2-strongman.png b/.gitbook/assets/robonaut2-strongman.png new file mode 100644 index 0000000..a86756c --- /dev/null +++ b/.gitbook/assets/robonaut2-strongman.png Binary files differ diff --git a/.gitbook/assets/robonaut2.jpg b/.gitbook/assets/robonaut2.jpg new file mode 100644 index 0000000..903dbbc --- /dev/null +++ b/.gitbook/assets/robonaut2.jpg Binary files differ diff --git a/.gitbook/assets/roboseum.jpg b/.gitbook/assets/roboseum.jpg new file mode 100644 index 0000000..e1cbbee --- /dev/null +++ b/.gitbook/assets/roboseum.jpg Binary files differ diff --git a/.gitbook/assets/robotic_algorithm.png b/.gitbook/assets/robotic_algorithm.png new file mode 100644 index 0000000..553c86f --- /dev/null +++ b/.gitbook/assets/robotic_algorithm.png Binary files differ diff --git a/.gitbook/assets/scara_configuration.png b/.gitbook/assets/scara_configuration.png new file mode 100644 index 0000000..ef08fc3 
--- /dev/null +++ b/.gitbook/assets/scara_configuration.png Binary files differ diff --git a/.gitbook/assets/sea_snake.jpeg b/.gitbook/assets/sea_snake.jpeg new file mode 100644 index 0000000..536b739 --- /dev/null +++ b/.gitbook/assets/sea_snake.jpeg Binary files differ diff --git a/.gitbook/assets/sea_snake2.png b/.gitbook/assets/sea_snake2.png new file mode 100644 index 0000000..03cf276 --- /dev/null +++ b/.gitbook/assets/sea_snake2.png Binary files differ diff --git a/.gitbook/assets/snake_arm_robot.png b/.gitbook/assets/snake_arm_robot.png new file mode 100644 index 0000000..e73a630 --- /dev/null +++ b/.gitbook/assets/snake_arm_robot.png Binary files differ diff --git a/.gitbook/assets/snake_arm_robot2.png b/.gitbook/assets/snake_arm_robot2.png new file mode 100644 index 0000000..51cc08b --- /dev/null +++ b/.gitbook/assets/snake_arm_robot2.png Binary files differ diff --git a/.gitbook/assets/snake_monster.jpeg b/.gitbook/assets/snake_monster.jpeg new file mode 100644 index 0000000..7c44be4 --- /dev/null +++ b/.gitbook/assets/snake_monster.jpeg Binary files differ diff --git a/.gitbook/assets/snake_monster2.jpeg b/.gitbook/assets/snake_monster2.jpeg new file mode 100644 index 0000000..eccdcbc --- /dev/null +++ b/.gitbook/assets/snake_monster2.jpeg Binary files differ diff --git a/.gitbook/assets/t-hr3-torque-servo.jpg b/.gitbook/assets/t-hr3-torque-servo.jpg new file mode 100644 index 0000000..150f518 --- /dev/null +++ b/.gitbook/assets/t-hr3-torque-servo.jpg Binary files differ diff --git a/.gitbook/assets/t-hr3.jpg b/.gitbook/assets/t-hr3.jpg new file mode 100644 index 0000000..4bd7edc --- /dev/null +++ b/.gitbook/assets/t-hr3.jpg Binary files differ diff --git a/.gitbook/assets/t-hr3_2.jpg b/.gitbook/assets/t-hr3_2.jpg new file mode 100644 index 0000000..6772494 --- /dev/null +++ b/.gitbook/assets/t-hr3_2.jpg Binary files differ diff --git a/Actuators/README.md b/Actuators/README.md new file mode 100644 index 0000000..b8365c9 --- /dev/null +++ 
b/Actuators/README.md @@ -0,0 +1,2 @@ +# Actuators + diff --git a/Actuators/a-detailed-look-into-sea-serial-elastic-actuator.md b/Actuators/a-detailed-look-into-sea-serial-elastic-actuator.md new file mode 100644 index 0000000..7cb1a93 --- /dev/null +++ b/Actuators/a-detailed-look-into-sea-serial-elastic-actuator.md @@ -0,0 +1,28 @@ +# A Detailed Look into SEA\(Series Elastic Actuator\) + +## Why we need to know this + +SEA is a commonly used actuator. It is powerful, and a lot of research and products are based on SEA. For example, Rethink Robotics widely uses SEA in its collaborative robots, and Hebi Robotics uses SEA in its modules as well. + + ![](https://spectrum.ieee.org/image/MjYxMjM2MQ.jpeg) + Rethink Sawyer Robot, all the joints are SEA + + ![](http://docs.hebi.us//resources/quickstart/two_actuators.jpg) + Hebi Modules + +More importantly, I am recently dealing with SEA motors from HEBI Robotics. I have got a new [Daisy Robot](http://docs.hebi.us/resources/kits/assyInstructions/X-Series_Hexapod.pdf). The robot is heavy and it trembles a lot while walking. So I want to make it crystal clear that I understand the motor at the very basic level. + +This also helps in understanding this famous branch of actuators. + +## What is SEA + +SEA is a mechanical mechanism. It can be added to any kind of actuator if properly designed. To put it simply, it attaches a spring between the end shaft of the motor and the load, to create elasticity and reduce the stiffness of the motor. + +## A little bit of history + +The famous [paper from Gill Pratt at MIT](http://www.cs.cmu.edu/~cga/legs/jh1c.pdf).
+ +## How does this work and Why this works + +## Pros & Cons + diff --git a/Actuators/mit-optimal-actuator-design.md b/Actuators/mit-optimal-actuator-design.md new file mode 100644 index 0000000..f7242ab --- /dev/null +++ b/Actuators/mit-optimal-actuator-design.md @@ -0,0 +1,4 @@ +# MIT Optimal Actuator Design + +tag: _MIT_ _BioMimetic Robotics Lab_ _Actuator_ + diff --git a/Robots/README.md b/Robots/README.md new file mode 100644 index 0000000..3d635c4 --- /dev/null +++ b/Robots/README.md @@ -0,0 +1,2 @@ +# Robots + diff --git a/Robots/asimo-new-tag-structure.md b/Robots/asimo-new-tag-structure.md new file mode 100644 index 0000000..6e9989b --- /dev/null +++ b/Robots/asimo-new-tag-structure.md @@ -0,0 +1,89 @@ +# Asimo new tag structure + +Company/University/Lab/Indivisual tag:_Honda 本田_ + +Application tag: _服务_ + +Environment tag: _地面_ _室内_ + +Mechanical Characteristic tag:_人形_ _双臂_ _双足_ _谐波减速器_ + +Software Characteristic tag: _ZMP_ + +* 图1 ![ASIMO](../.gitbook/assets/asimo.jpg) +* 图2 ![ASIMO\_HISTORY](../.gitbook/assets/asimo_history.jpg) + +ASIMO是Advanced Step in Innovative Mobility的简称。由日本的汽车厂商巨头本田历经多年研发而成,是日本人形机器人中最尖端的一个,也是最出名,最上镜的机器人。最初以「创造能够与社会共存,协调,并赋予人类社会以新价值的机器人」为愿景,由一群充满热情的本田公司的技术专家,在1986年开始了双足行走机器人的研发。最近虽然没什么新闻了,但是听说本田一直有在持续研发,还开始了抢险救灾版本的ASIMO的研发。 + +ASIMO能够根据周围的人的行动而自行移动。具有较强的双足自平衡能力。能够利用多个传感器对外界进行精确的感知,并作出复杂的行为。并且ASIMO的手臂拥有足够多的自由度,使得它能够作出许多复杂,拟人的手部动作。 + +## 硬件 + +### 2011年11月时的公开的性能表: + +#### 尺寸 + +* 身高 130cm +* 宽度 45cm +* 深度 34cm +* 重量 48kg + + **性能** + +* 最大速度 9km/h +* 活动时间 40分钟(步行时)可用自动充电功能进行连续活动 + + **关节自由度** + +* 头部 3 自由度 +* 腕部 7×2 自由度 +* 手部 13×2 自由度 +* 腰部 2 自由度 +* 脚步 6 自由度 +* 总计 57自由度 + +#### 抓握力 + +* 0.5kg/手(五指抓握的状态下) + + **致动器** + +* 伺服电机 + 谐波减速器 + 驱动器 + +#### 传感器 + +* 脚部: 6轴力传感器(可感知力在脚掌中的分布) +* 腰部: 陀螺仪 + 加速度传感器 + +## 应用场景 + +家居服务,接待,导游等 + +### ASIMO的技能表: + +* 现场解说 +* 端茶送水 +* 预测行人的行走方向,并自动避开 +* 三个人同说说话时识别谁说了什么 +* 迎接客人并引导 +* 以9km/h的速度行走 +* 双足起跳 +* 在有凹凸起伏的路上行走 +* 单脚跳 +* 脚尖射门 +* 拿起水瓶,拧开盖子,并倒进纸杯 +* 手语 +* 人脸识别 + +## 拓展 + 
+[机器人链接](http://www.honda.co.jp/ASIMO/) + +[ASIMO英文解说视频](https://www.youtube.com/watch?v=JlRPICfnmhw) + +[ASIMO进化史视频](https://www.youtube.com/watch?v=cqL2ZvZ-q14) + +[ASIMO公开的技术资料](http://asimo.honda.com/downloads/pdf/asimo-technical-information.pdf) + +[ASIMO公开的技术资料2](http://asimo.honda.com/asimo-specs/) + diff --git a/Robots/collaborative-transportation.md b/Robots/collaborative-transportation.md new file mode 100644 index 0000000..4d003e5 --- /dev/null +++ b/Robots/collaborative-transportation.md @@ -0,0 +1,8 @@ +# collaborative transportation + +tag: _ETHZ_ _aslteam_ _多机协作_ _四足_ _四旋翼_ + +## 拓展 + +[视频](https://www.youtube.com/watch?v=9PprNdIKRaw) + diff --git a/Robots/epson-scara-robot-overview.md b/Robots/epson-scara-robot-overview.md new file mode 100644 index 0000000..552e5b4 --- /dev/null +++ b/Robots/epson-scara-robot-overview.md @@ -0,0 +1,32 @@ +# Epson SCARA Robot Overview + +tag: _Epson SCARA_ _机械臂_ _执行_ _机器人部件_ + +* Epson G1 ![Epson G1](../.gitbook/assets/epson_scara-g1.jpg) +* Epson G3 ![epson\_scara-G3](../.gitbook/assets/epson_scara-g3.jpg) +* Epson G6 ![epson\_scara-G6](../.gitbook/assets/epson_scara-g6.jpg) +* Epson G10 ![epson\_scara-G10](../.gitbook/assets/epson_scara-g10.jpg) +* Epson G20 ![epson\_scara-G20](../.gitbook/assets/epson_scara-g20.jpg) + +Epson在机器人领域是做的比较好的一家。经常会听到Epson Robot和安川,发那科等公司一起出现。 + +他们也是一家做机械臂和控制器解决方案的公司。其实 他们家很著名的就是这款Scara机械臂。Scara(Selective Compliance Assembly Robot Arm or Selective Compliance Articulated Robot Arm)的意思是选择性柔性平面机器人,就是这个机械臂的每个关节在XY平面方向上是带有一定柔性(可Back Drive),在Z方向上是完全刚性的。 + +Scara机构示意图: ![scara 示意图](../.gitbook/assets/scara_configuration.png) + +这种机械臂的好处是可以在XY平面运动的非常快(由于运动速度叠加) + +## 硬件 + +由于硬件款式较多,先挖个坑之后补(2017.11.13) + +## 应用场景 + +码垛,生产线搬运,较精密移动零件。 + +## 拓展 + +[Epson Scara Robot 官网](http://robots.epson.com/products/1) + +[wiki-Scara](https://en.wikipedia.org/wiki/SCARA) + diff --git a/Robots/fetch-ji-qi-ren-ping-tai.md b/Robots/fetch-ji-qi-ren-ping-tai.md new file mode 100644 index 0000000..c0aa618 --- 
/dev/null +++ b/Robots/fetch-ji-qi-ren-ping-tai.md @@ -0,0 +1,39 @@ +# Fetch机器人平台 + +tag: _Fetch Robotics_ _机械臂_ _轮式_ _地面_ + +author: gzy @ Dec.2018 + +* Freight移动机器人 ![Freight](../.gitbook/assets/freight_base.png) +* Fetch & Freight ![Fetch](../.gitbook/assets/fetch.png) + +Fetch Robotics成立于2014,2017年12月的B轮融资$25M。其核心服务:快速部署自动仓储,ioT云技术。为此团队设计了各种各样能够连上云端,进行远程控制的移动平台,和打造了一个云服务平台。 + +而我们关心的是其机器人产品:Freight移动机器人和Fetch机械臂。这家公司的独特之处在于拥抱开源和学术界。他们将其中两款产品写了驱动,做了文档,把平台开放出来让大家开发,这就吸引很多公司和实验室用他们的平台来进行机器人算法开发。 + +看起来这家公司相当有做硬件的实力,机械底盘谁都能造,但是自己造机械臂还是要勇气的。这家的产品和pr2很类似,而willow garage已经跪了。现在卖机器人是赚不了什么钱的,只能想办法和行业靠拢,做仓储物流平台像是正确的思路,公司能融到25M说明还是有市场可以赚钱。下次可以介绍一下Fetch的竞争对手,从Willow Garage脱离出来的unbounded Robotics,还有Toyota Human Support Robot,做的产品简直一模一样。 + +## 硬件 + +详细的硬件的参数可以在下面的拓展中有详细链接。这里主要列举几个基本量。 + +* 身高 1.1m +* 机械臂 7 DoF + 末端 1 DoF +* 头部 2 Dof +* 头部 RGBD相机 +* 底盘 3D线激光雷达 +* 底盘 6轴IMU +* 电池续航 8h + +## 应用场景 + +家居服务,主要用于科研 + +## 拓展 + +[机器人平台文档](http://docs.fetchrobotics.com/introduction.html),这个做的相当详细,毕竟拿出来卖的产品,还是下了功夫了。ROS的Navigation那一套和MoveIt那一套都可以拿来用,做research上手相当方便。 + +[机器人平台的主要参数](https://fetchrobotics.com/wp-content/uploads/2018/04/Fetch_robot_spec_overview.pdf) + +[机器人平台文章2012](https://fetchrobotics.com/wp-content/uploads/2018/04/Fetch-and-Freight-Workshop-Paper.pdf) + diff --git a/Robots/fusion-yuan-cheng-cao-zuo-de-ji-sheng-ji-qi-ren.md b/Robots/fusion-yuan-cheng-cao-zuo-de-ji-sheng-ji-qi-ren.md new file mode 100644 index 0000000..bfee3cf --- /dev/null +++ b/Robots/fusion-yuan-cheng-cao-zuo-de-ji-sheng-ji-qi-ren.md @@ -0,0 +1,28 @@ +# Fusion-远程操作的寄生机器人 + +tag: _Keio University_ _协作机器人_ + +![Fusion](../.gitbook/assets/fusion.jpeg) + +## 实验室 + +Fusion是一个依附在人(surrogate)身上的人机协作系统,虽然目前做的非常初期,demo效果还是很振奋人心,前景非常好。 + +## 硬件 + +* 一个背包,包括通信、控制、供电系统 +* 两个机械臂,各6个自由度 +* 两个机械手,来源不详 +* 一个头,包括3自由度的主动减震,双目立体视觉,双耳听觉 +* 支撑头部的平台 +* 总重量不详,应该不轻,背着很费劲 + +## 应用 + +* 支持利用Oculus Rift对机器人进行远程操控 +* 远程教学和交互 + +## 链接 + +* [Fusion官方视频](https://www.youtube.com/watch?time_continue=145&v=Nrc7gH6dydw) + diff --git 
a/Robots/giacometti-arm-with-balloon-body.md b/Robots/giacometti-arm-with-balloon-body.md new file mode 100644 index 0000000..7cc29ce --- /dev/null +++ b/Robots/giacometti-arm-with-balloon-body.md @@ -0,0 +1,8 @@ +# Giacometti Arm with Balloon Body + +tag: _Suzumori Endo Lab_ _地面_ _空中_ _机械臂_ + +长度20m的超长超轻充气机械臂 + +[Giacometti Arm with Balloon Body](https://www.youtube.com/watch?v=INTHRNcyW9w) + diff --git a/Robots/hermes-yao-cao-zuo-ji-qi-ren.md b/Robots/hermes-yao-cao-zuo-ji-qi-ren.md new file mode 100644 index 0000000..422d29f --- /dev/null +++ b/Robots/hermes-yao-cao-zuo-ji-qi-ren.md @@ -0,0 +1,40 @@ +# HERMES 遥操作机器人 + +tag: _MIT_ _BioMimetic Robotics Lab_ _双足_ + +![Aibo](../.gitbook/assets/hermes.jpeg) + +## 硬件 + +* 成年人的9成高 +* 24 DoF +* 45kg +* _high-torque-density electric actuator_,该实验室还做高能量密度的电机,正好用于Hermes。这款电机动态性高,瞬时输出功率大。因此hermes可以做非常暴力的“击穿木板”的动作。 +* 自己开发的基于数传的视觉反馈系统 +* 自己开发的带_躯干姿态反馈_的遥操作系统(可以从demo视频看到身体的倾斜会施加给操纵者) +* 全是自己开发的好强…感觉全是2个PhD的工作量 + +## 应用场景 + +Balance Feedback Interface\(BFI\) + +> This strategy means that if the robot is about to lose balance, then the BFI attempts to pull the human out of balance as well. In this case, we hypothesize that the BFI is to trigger human’s natural response to disturbances. + +![Aibo](../.gitbook/assets/bfi.jpeg) + +这是设想的机器人平衡方式,通过机器人身上的传感器反馈来拉扯人,使人也产生相同的不平衡。复制人所做出的自然反应施加给机器人,使机器人保持平衡。这也就是标题所说的 + +> MIT Robot Steals Human Brains to Help It Balance + +然而这一功能还在开发,希望将其完善到6DoF的全面反馈。 + +目前机器人还不能行走,可以进行遥操作击穿木板,拿斧头劈门,拿灭火器灭火这样的工作。 + +## 拓展 + +[MIT 官方 demo](https://www.youtube.com/watch?time_continue=160&v=2-5n2IsdCqU) + +[官方介绍视频](https://www.youtube.com/watch?time_continue=196&v=p8ozov_xymM) + +J. Ramos, A. Wang, and S. Kim, "Robot-Human Balance State Transfer during Full-Body Humanoid Teleoperation Using Divergent Component of Motion Dynamics" in Robotics and Automation \(ICRA\), 2016 IEEE International Conference on. IEEE, 2016. 
+ diff --git a/Robots/mit-cheetah.md b/Robots/mit-cheetah.md new file mode 100644 index 0000000..da60efe --- /dev/null +++ b/Robots/mit-cheetah.md @@ -0,0 +1,147 @@ +# MIT Cheetah + +tag: _MIT_ _BioMimetic Robotics Lab_ _四足_ _地面_ + +author: gzy @ Dec. 2018 + +![Cheetah1](../.gitbook/assets/mitcheetah1.jpeg) + +* Cheetah1 + +![Cheetah2](../.gitbook/assets/mitcheetah2.png) + +* Cheetah2\(上面的蜜汁蟒蛇状物体其实是护罩管+猎豹花纹…\) + +![Cheetah3](../.gitbook/assets/mitcheetah3.jpg) + +* Cheetah3 + +The Cheetah robot是MIT Biomimetic Robotics Lab开发的一款四足机器人。从推出第一款至今,MIT的Cheetah可以说是MIT驰名的一个机器人品牌。 + +实验室老板Sangbae Kim老哥本科毕业自延世大学,中期还当过两年兵。之后就斯坦福,哈佛,MIT各种飞。 + +## Cheetah1 + +2009年推出Cheetah1,机器人被固定在跑道上,只能前后移动和pitch,不能roll或者yaw,达到了速度22km/h(比我跑3km的均速还快…)。从实验视频中可以看到,单腿有两个自由度,两个电机并列放在肩膀位置,通过连杆控制两个自由度。这个机器人的试验中还验证了跑步提速过程中**四足步态切换**。(TODO:常用四足步态介绍) + +![Cheetah1](../.gitbook/assets/mitcheetah1spec.jpeg) + +Cheetah1 Spec + +Cheetah1主要的目的在于节省跑步能量,上面还有很多技术尝试。比如regenerative motor driver,利用能量存储再生的方式减小跑步过程的动能损失。腿上连接了Kevlar\(防弹衣材料\) tendor来减小撞地时腿上的冲击。身体前后两部分由中空脊柱连接,脊柱还差分地连上了tendor。(TODO:这部分比较混乱,之后会找文章扩充,求讨论!)。总之是实现了非常强大的生物机理的模仿和机械设计。 + +TODO: Cheetah1的控制方法:virtual spring control + +## Cheetah1参数 + +* 最快22km/h +* 功率1Kw +* COT \(defined as power consumption divided by weight times velocity\), of 0.52. The team says this COT performance rivals that of running animals of the same size. By comparison, Honda's Asimo humanoid has a COT of 2 and Boston Dynamics' BigDog has a COT of 15. 
+* 普通市面上的电机 + +## Cheetah2 + +2015ICRA上发表了Cheetah2,其设计早在2010年就已经开始了。Sangbae Kim老哥显然很擅长搞电机,Cheetah机器人仍然是全电动,这电机算是该实验室的核心技术了。 + +这次的版本摒弃了柔性脊柱的设计,整个身体成为一个刚体,跑着看起来非常笨重吃力。\(不明白这里的设计思路转变,可能刚体更好控,机械参数更好调,更不容易坏。可以想象得出来Cheetah1的时候设计机械结构踩了很多坑,什么线驱动,多自由度脊柱这样的设计,一看就是费死劲了才work。之后的版本再也没有出现过柔性脊柱。\) + +Cheetah2用上了实验室自研的电机,机器人本体重了很多,可以看得出来电机payload大大增加了。 + +## Cheetah2的相关控制算法 + +### 跑步力控算法 + +跑步的步态用到了impulse planning,这个方法是跑步时规划步态的核心。因为跑步时前腿和后腿分别接触地面,计算腿需要给地面多大的冲量来使得前后两个冲量在跑步时使机器人身体前后摆动保持动态平衡。当跑步速度加快时,接触地面时间变短,同样的动作需要以更大的力冲击地面以得到同样的冲量保持平衡。所以**速度越快电机需要的力越大**。 + +### 跳跃避障算法: + +头顶激光雷达,检测障碍物。在障碍物到来前快速规划,调整步幅,使前脚踩在障碍物前。蹬前脚起身,蹬后脚抬起身体高度。依靠速度飞过障碍物。这样就可以了,是不是很简单呢。关键在于,激光雷达要提前得到障碍物信息,反应时间估计不到1s。调整步态的时间更短,调整完就得立马起跳。要越过40cm障碍物,身高的一半,需要电机给出很大的冲力。说到底**感知决策控制硬件**都很厉害。[这个官方视频](https://www.youtube.com/watch?v=_luhn7TLfWU)简单介绍了跳跃避障算法,值得一看。 + +TODO: + +### 低阻抗设计:无需末端力传感器的力控方案? + +low-impedance transmission: Low inertia, low friction, where there’s no stiffness. The inertia and the friction is most of it. + +The Cheetah2 trying to maximize the force bandwidth; how quickly you can change force and at the same time maximize the transparency. Transparency means the mechanical impedance between the actuator force to the end effector + +All the energy can transfer to the motor side because there’s very little inertia, very little impedance + +### 节省能量消耗的可能做法: + +one way to save energy is adding parallel springs, not series elastic. Series elastic doesn’t really save force generation, but parallel springs do. We actually have a paper about adding parallel springs. 
+ +## Cheetah2参数 + +* 持续奔跑速度22km/h +* 每条腿2个自由度\(2自由度的腿转弯会很蛋疼,视频里跑着饶了一个半径极大的圈才实现转弯\) +* 33kg +* 1m长 +* 80cm高 +* +视觉反馈可达到60km/h的速度(刘翔啊…),the vision sensor detect the ground height to have the contact angle of the leg be accurate +* 有Lidar用以避障,可以检测障碍物,规划步态并且跳过去 +* 可以越过33cm的障碍物 + +## Cheetah3 + +Cheetah3的造型就很轻盈时尚,很2018了。看到是视频感到各种设计紧凑,动作灵活,太强了。这个设计理念感觉很受Boston Dynamics的SpotMini的启发(虽然只差了1年时间) + +## Cheetah3控制方法 + +官网上挂了一个空的链接标题是Model Predictive Control for Cheetah 3,说明很快会发布MPC的控制方法? + +### 稳定过障碍CPG\(Central Pattern Generator\) + +大部分时间四脚着地,左前右后和左后右前交替往前。该步态已经可以用于行走障碍物堆和爬楼梯。[具体效果视频](https://www.youtube.com/watch?v=0RoySGaJNho)。 ![Cheetah3 CPG](../.gitbook/assets/mitcheetah3cpg.png) + +CPG循环图 + +### 跳跃步态 + +很好奇双脚动态交替跳是怎么实现的,这个容我日后实现一个双脚交替跳的步态再来填充。这个应该在Marc的书里有解答。 + +## Cheetah3参数 + +* 每条腿3个自由度\(相比于前代有很大进步,可以灵巧转身\) +* 依然所有电机都在肩膀关节,而且传动采用了骚气的链传动。\(虽然机械效率低了点,但是传动比稳定可靠,不会错齿,做这个的小哥肯定很会修自行车🚲\) +* 腿部膝关节可以变换弯曲方向 +* 重量:32kg + +### 能力 + +* 稳定步态走路 +* 稳定步态爬楼梯 +* 双脚交替跳 +* 双脚交替跳前进 +* 双脚交替跳爬楼梯 +* push recovery(TODO:这是如何实现的?) +* 跳上76cm的桌子 + +## 拓展 + +[Cheetah1 switch running gait](https://www.youtube.com/watch?time_continue=123&v=UBHJqnM8RTU) + +[Cheetah1 Leg Swing Test](https://www.youtube.com/watch?v=v-sl6VbnUxc), 这个2010年的实验可以看到猎豹单条腿是怎么动的,以及肩关节上的同轴二自由度BLDC电机。 + +[Cheetah1 Press Coverage](https://spectrum.ieee.org/automaton/robotics/robotics-hardware/mit-cheetah-robot-running) + +[Cheetah1 Paper](https://ieeexplore.ieee.org/document/6631038) + +还有这个实验室引以为傲的[Cheetah电机介绍](https://biomimetics.mit.edu/research/optimal-actuator-design),这是人家的核心科技 + +[Cheetah2 Official Website](https://biomimetics.mit.edu/research/dynamic-locomotion-mit-cheetah-2) + +[Cheetah2 Press Inteview on ICRA2015](https://robohub.org/robots-cheetah-2/),I think this is very useful since Prof. Kim explained a lot of concept in the inteview. 
+ +[Cheetah3 Video](https://www.youtube.com/watch?time_continue=11&v=QZ1DaQgg3lE) + +[Cheetah3 Press Coverage](http://news.mit.edu/2018/blind-cheetah-robot-climb-stairs-obstacles-disaster-zones-0705) + +TODO: add related paper and read thru. + +## 讨论 + +Sangbae Kim说电机比液压要好。但是,传统电机不好,His group developed its own "three phase permanent magnet synchronous motor," which reportedly doubles the torque density of the commercial motors they were using in the robot!! 给老哥跪下了。再次验证了我的结论,牛逼的机器人一定有牛逼的硬件,牛逼的硬件一定有牛逼的电机支持。所以归根结底还是电机,电机弱的机器人没有希望。该Lab电机的介绍[在这里](https://github.com/thu-skyworks/Roboseum/blob/master/Actuators/MIT%20Optimal%20Actuator%20Design.md)。 + +此外,官网上还有关于给狗身上加尾巴的研究,这个和CMU的Robomechanics Lab做的事情不谋而合,想利用尾巴对机器人保持平衡。这学期还看到他们lab的同学抬着摔坏的Minitaur回实验室,说是从2m高的台子上跳下来跪了… + diff --git a/Robots/ocean-one.md b/Robots/ocean-one.md new file mode 100644 index 0000000..122f8e2 --- /dev/null +++ b/Robots/ocean-one.md @@ -0,0 +1,23 @@ +# Ocean One + +tag: _Stanford Robotics Lab_ _人形_ _水下_ _探索_ + +![Ocean One](../.gitbook/assets/ocean_one.jpg) + +![Ocean One 2](../.gitbook/assets/ocean_one_2.png) + +斯坦福大学机器人实验室开发的人形水下遥操作机器人。上半身像人形,下半身像传统水下机器人。 + +first come out: 2016 + +* 头部有两个自由度和双目视觉 +* 双臂各有7自由度,力反馈,弹性串联驱动 +* 手部是欠驱动抓手,具有触觉和摄像头 +* 身体共有8个推进器,以及用于探索,导航,姿态估计用的多个广角摄像头,多普勒测速计,气压计等许多传感器 + +Video URL: [Youtube](https://www.youtube.com/watch?v=p1HmgP9l4VY) + +News URL: [IEEE](https://spectrum.ieee.org/automaton/robotics/humanoids/stanford-humanoid-submarine-robot) + +Official Site URL: [cs.stanford.edu](http://cs.stanford.edu/group/manips/ocean-one.html) + diff --git a/Robots/sarcos-robotics-overview.md b/Robots/sarcos-robotics-overview.md new file mode 100644 index 0000000..c6320a5 --- /dev/null +++ b/Robots/sarcos-robotics-overview.md @@ -0,0 +1,8 @@ +# Sarcos Robotics Overview + +tag: _Sarcos_ _蛇形_ _外骨骼操作_ _轮式_ _机械臂_ + +## 拓展 + +[视频](https://www.youtube.com/watch?v=DfYAvWIfhYY) + diff --git a/Robots/t-hr3-humanoid-robot.md b/Robots/t-hr3-humanoid-robot.md new file mode 100644 index 
0000000..82acf55 --- /dev/null +++ b/Robots/t-hr3-humanoid-robot.md @@ -0,0 +1,79 @@ +# T-HR3 Humanoid Robot + +tag: _Toyota 丰田_ _人形_ _外骨骼_ _虚拟现实_ + +![T-HR3](../.gitbook/assets/t-hr3.jpg) + +![T-HR3](../.gitbook/assets/t-hr3_2.jpg) + +### 一些info: + +1. honda=本田,toyota=丰田 +2. toyota于2017年11月最新出的人形机器人 + +### 机器人的优点: + +1. 力矩伺服。这个做的非常棒,机器人的关节有应变式的力矩传感 + +![T-HR3](../.gitbook/assets/t-hr3-torque-servo.jpg) 图:力矩执行器的机械结构 + +这是Toyota和Tamagawa Seiki and Nidec Copal Electronics合作的关节,用于测量力矩。该关节同时用于T-HR3和Master Maneuvering System. 2. 远程控制(Master Maneuvering System, MMS\). VR+关节力反馈+数据手套,整体方案做的非常棒. 更有猜测,可以利用示教的数据作为机器学习的数据,用以学习控制机器人的策略。 + +### 机器人的问题: + +1. 因为人的姿态和机器人姿态不是完美对应,有一个bias,所以人和机器人不能同时操作同一个物品。 +2. 机器人各种抖抖抖,传感器拿到的值也不是特别的精确,或者说没有很好的滤波,所以电机得到的指令一直在tremble。 +3. 机器人的操作一直慢半拍。不知道是通讯时延还是数据处理的问题,又或者是机器人执行速度更不上(毕竟有力控,执行速度会比较慢)。时延十分明显,可以达到0.5甚至1s。毕竟那么多关节的实时同步控制太难。 +4. 从demo中单腿支撑恢复双腿站立的瞬间,上身明显晃动,足以判断脚底没有压力传感。 +5. 机器人甚至连一步都没有走,显然是怕摔,主要精力可能都放在上半身了。而遥操作的座椅不支持控制行走步态,原因可以参考:[知乎:为何双足机器人不直接拷贝人类行动时的数据?](https://www.zhihu.com/question/65813578) + + 所以这个人形机器人就很尴尬,只能模仿上肢动作而不能移动。 + +## 硬件 + +#### 尺寸 + +* 身高 152cm +* 重量 74.8kg \(比我重比我矮…\) + +#### 关节自由度 + +* 头部 2 自由度 +* 手臂 \(3+1+3\)×2=14 自由度 +* 灵巧手 1×2 自由度(??) +* 腰部 3 自由度 +* 腿脚 6×2 自由度 +* 总计 32 自由度 + +#### 遥操作自由度(手臂+手,带力反馈) + +* 肩膀 3×2 自由度 +* 肘 1×2 自由度 +* 手腕 3×2 自由度 +* 手 1×2 自由度(??) 
+* 总计 16个关节传感 + +#### 传感器 + +* 头部:双目视觉(可能只是用于传输数据给HTC VIVE) +* 关节:力矩传感 + +#### 远程控制(Master Maneuvering System, MMS\) + +利用了HTC VIVE的虚拟现实来实时展现机器人看到的图像,并且通过MMS的位置传感来控制机器人。机器人将得到的力矩反馈通过MMS来施加在操作者身上,这是一个非常棒的解决方案。但是问题也不少,见开头。 + +## 应用场景 + +Toyota官方称 "this is a platform with capabilities that can safely assist humans in a variety of settings, such as the home, medical facilities, construction sites, disaster-stricken areas and even outer space." + +### T-HR3的技能表: + +* 较低时延的遥操作机器人 +* 单脚站立做各种姿势(预先编程确定各关节角然后执行) + +## 拓展 + +[demo视频](https://www.youtube.com/watch?v=GTw7q3-Bn6M) + +[报道(含更多视频)](https://www.google.com.hk/url?sa=t&rct=j&q=&esrc=s&source=web&cd=7&ved=0ahUKEwi49bDhgtzXAhUN2WMKHXN-Df8QFghBMAY&url=https%3a%2f%2fwww.engadget.com%2f2017%2f11%2f21%2ftoyota-t-hr3-robot%2f&usg=AOvVaw2D1Qf048U6b_buHHDM9Dbf) + diff --git a/SUMMARY.md b/SUMMARY.md new file mode 100644 index 0000000..6fa179e --- /dev/null +++ b/SUMMARY.md @@ -0,0 +1,132 @@ +# Table of contents + +* [Roboseum](README.md) +* [Robots](robots/README.md) + * [Modular\_Snake](robots/modular_snake.md) + * [Atlas](robots/atlas.md) + * [ANYdrive](robots/anydrive.md) + * [Snake\_Arm\_Robot](robots/snake_arm_robot.md) + * [Asimo](robots/asimo%20%281%29.md) + * [Asimo new tag structure](robots/asimo-new-tag-structure.md) + * [Cassie](robots/cassie.md) + * [SpotMini](robots/spotmini.md) + * [Ocean One](robots/ocean-one.md) + * [T-HR3 Humanoid Robot](robots/t-hr3-humanoid-robot.md) + * [collaborative transportation](robots/collaborative-transportation.md) + * [SEA-Snake](robots/sea-snake.md) + * [Aibo](robots/aibo.md) + * [IMPASS](robots/impass.md) + * [NABiRoS](robots/nabiros.md) + * [Giacometti Arm with Balloon Body](robots/giacometti-arm-with-balloon-body.md) + * [CoBot](robots/cobot.md) + * [Fetch机器人平台](robots/fetch-ji-qi-ren-ping-tai.md) + * [Salto](robots/salto.md) + * [Asimo](robots/asimo%20%281%29.md) + * [Sarcos Robotics Overview](robots/sarcos-robotics-overview.md) + * [ANYmal](robots/anymal.md) + * 
[BALLU](robots/ballu.md) + * [robonaut2](robots/robonaut2.md) + * [Fusion-远程操作的寄生机器人](robots/fusion-yuan-cheng-cao-zuo-de-ji-sheng-ji-qi-ren.md) + * [Epson SCARA Robot Overview](robots/epson-scara-robot-overview.md) + * [HERMES 遥操作机器人](robots/hermes-yao-cao-zuo-ji-qi-ren.md) + * [Laikago](robots/laikago.md) + * [Gila-Monster](robots/gila-monster.md) + * [Electrick](robots/electrick.md) + * [E2-DR](robots/e2-dr.md) + * [MIT Cheetah](robots/mit-cheetah.md) + * [Snake-Monster](robots/snake-monster.md) +* [Sensors](sensors/README.md) + * [霍尔 Hall effect sensor](sensors/huo-er-hall-effect-sensor.md) + * [编码器 Encoder](sensors/bian-ma-qi-encoder.md) + * [RGBcamera](sensors/rgbcamera.md) + * [IMU](sensors/imu.md) + * [Laser](sensors/laser.md) + * [学习型描述子](sensors/xue-xi-xing-miao-shu-zi.md) + * [DepthCamera](sensors/depthcamera.md) + * [EventCamera](sensors/eventcamera.md) + * [旋转变压器 Resolver](sensors/xuan-zhuan-bian-ya-qi-resolver.md) +* [Software](software/README.md) + * [Algorithms](software/algorithms/README.md) + * [Mobility](software/algorithms/mobility/README.md) + * [Aerial Robotics](software/algorithms/mobility/aerial-robotics/README.md) + * [PX4](software/algorithms/mobility/aerial-robotics/px4.md) + * [Planning](software/algorithms/mobility/aerial-robotics/planning.md) + * [Geometry](software/algorithms/mobility/aerial-robotics/geometry.md) + * [Control](software/algorithms/mobility/aerial-robotics/control.md) + * [Mechanics](software/algorithms/mobility/aerial-robotics/mechanics.md) + * [LeggedMobility](software/algorithms/mobility/leggedmobility.md) + * [Bioinspiration](software/algorithms/mobility/bioinspiration.md) + * [Templates](software/algorithms/mobility/templates.md) + * [MechanicalDynamicalSystems](software/algorithms/mobility/mechanicaldynamicalsystems.md) + * [Motion Planning](software/algorithms/motion-planning/README.md) + * [Visibility Graph](software/algorithms/motion-planning/visibility-graph/README.md) + * [Visibility 
Graph](software/algorithms/motion-planning/visibility-graph/visibility-graph.md) + * [Rapid Exploring Random Trees](software/algorithms/motion-planning/rapid-exploring-random-trees/README.md) + * [Rapid Exploring Random Trees](software/algorithms/motion-planning/rapid-exploring-random-trees/rapid-exploring-random-trees.md) + * [Probablistic Road Map](software/algorithms/motion-planning/probablistic-road-map/README.md) + * [Probablistic Road Map](software/algorithms/motion-planning/probablistic-road-map/probablistic-road-map.md) + * [Trapezoidal Decomposition](software/algorithms/motion-planning/trapezoidal-decomposition/README.md) + * [Trapezoidal Decomposition](software/algorithms/motion-planning/trapezoidal-decomposition/trapezoidal-decomposition.md) + * [Sample Based Planners](software/algorithms/motion-planning/sample-based-planners/README.md) + * [Characteristics of Sample Based Planners](software/algorithms/motion-planning/sample-based-planners/characteristics-of-sample-based-planners.md) + * [MoveIt](software/algorithms/motion-planning/moveit.md) + * [Perception](software/algorithms/perception/README.md) + * [Localization and Mapping](software/algorithms/perception/localization-and-mapping/README.md) + * [Principle](software/algorithms/perception/localization-and-mapping/principle/README.md) + * [localization](software/algorithms/perception/localization-and-mapping/principle/localization/README.md) + * [odometry](software/algorithms/perception/localization-and-mapping/principle/localization/odometry/README.md) + * [Odometry Modeling](software/algorithms/perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/README.md) + * [Odometry Modeling](software/algorithms/perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/odometry-modeling.md) + * [VisualOdometry](software/algorithms/perception/localization-and-mapping/principle/localization/odometry/visualodometry.md) + * [Iterative Closest 
Point](software/algorithms/perception/localization-and-mapping/principle/localization/iterative-closest-point/README.md) + * [Iterative Closest Point](software/algorithms/perception/localization-and-mapping/principle/localization/iterative-closest-point/iterative-closest-point.md) + * [Optical Flow](software/algorithms/perception/localization-and-mapping/principle/localization/optical-flow.md) + * [Particle Filter](software/algorithms/perception/localization-and-mapping/principle/localization/particle-filter.md) + * [LoopClosing](software/algorithms/perception/localization-and-mapping/principle/loopclosing.md) + * [mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/README.md) + * [Occupancy Grid Mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/README.md) + * [Occupancy Grid Mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/occupancy-grid-mapping.md) + * [Map Registration](software/algorithms/perception/localization-and-mapping/principle/mapping/map-registration/README.md) + * [Map Registration](software/algorithms/perception/localization-and-mapping/principle/mapping/map-registration/map-registration.md) + * [3D Mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/3d-mapping/README.md) + * [3D Mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/3d-mapping/3d-mapping.md) + * [mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/mapping/README.md) + * [mapping](software/algorithms/perception/localization-and-mapping/principle/mapping/mapping/mapping.md) + * [Optimization](software/algorithms/perception/localization-and-mapping/principle/optimization.md) + * [Project](software/algorithms/perception/localization-and-mapping/project/README.md) + * [ORB](software/algorithms/perception/localization-and-mapping/project/orb.md) + * 
[DVO](software/algorithms/perception/localization-and-mapping/project/dvo.md) + * [LSD](software/algorithms/perception/localization-and-mapping/project/lsd.md) + * [SVO](software/algorithms/perception/localization-and-mapping/project/svo.md) + * [Dataset](software/algorithms/perception/localization-and-mapping/dataset/README.md) + * [TUM](software/algorithms/perception/localization-and-mapping/dataset/tum.md) + * [KITTI](software/algorithms/perception/localization-and-mapping/dataset/kitti.md) + * [Basics](software/algorithms/perception/basics/README.md) + * [SingleViewGeometry](software/algorithms/perception/basics/singleviewgeometry.md) + * [CameraModeling](software/algorithms/perception/basics/cameramodeling.md) + * [PointandLineDuality](software/algorithms/perception/basics/pointandlineduality.md) + * [PinholeCameraModel](software/algorithms/perception/basics/pinholecameramodel.md) + * [PerspectiveProjection](software/algorithms/perception/basics/perspectiveprojection.md) + * [Recognition](software/algorithms/perception/recognition/README.md) + * [CNNs](software/algorithms/perception/recognition/cnns.md) + * [Learning](software/algorithms/learning/README.md) + * [Estimation](software/algorithms/learning/estimation/README.md) + * [ParticleFilters](software/algorithms/learning/estimation/particlefilters.md) + * [TargetTracking](software/algorithms/learning/estimation/targettracking.md) + * [KalmanFilter](software/algorithms/learning/estimation/kalmanfilter.md) + * [MaximumLikelihoodEstimate](software/algorithms/learning/estimation/maximumlikelihoodestimate/README.md) + * [MaximumLikelihoodEstimate](software/algorithms/learning/estimation/maximumlikelihoodestimate/maximumlikelihoodestimate.md) + * [Simulator](software/simulator/README.md) + * [Gazebo](software/simulator/gazebo.md) + * [Airsim](software/simulator/airsim.md) + * [VizDoom](software/simulator/vizdoom.md) + * [Rviz](software/simulator/rviz.md) + * [Library](software/library/README.md) + * 
[PCL](software/library/pcl.md) + * [OpenCV](software/library/opencv.md) + * [OpenGL](software/library/opengl.md) +* [Actuators](actuators/README.md) + * [Floating\_Spring\_Joint](actuators/floating_spring_joint.md) + * [A Detailed Look into SEA\(Serial Elastic Actuator\)](actuators/a-detailed-look-into-sea-serial-elastic-actuator.md) + * [MIT Optimal Actuator Design](actuators/mit-optimal-actuator-design.md) + diff --git a/Sensors/README.md b/Sensors/README.md new file mode 100644 index 0000000..e7fd3e1 --- /dev/null +++ b/Sensors/README.md @@ -0,0 +1,2 @@ +# Sensors + diff --git a/Sensors/bian-ma-qi-encoder.md b/Sensors/bian-ma-qi-encoder.md new file mode 100644 index 0000000..d23fd71 --- /dev/null +++ b/Sensors/bian-ma-qi-encoder.md @@ -0,0 +1,4 @@ +# 编码器 Encoder + +tag: _Sensor_ _Angle_ Author: gzy + diff --git a/Sensors/huo-er-hall-effect-sensor.md b/Sensors/huo-er-hall-effect-sensor.md new file mode 100644 index 0000000..3b9ae6c --- /dev/null +++ b/Sensors/huo-er-hall-effect-sensor.md @@ -0,0 +1,4 @@ +# 霍尔 Hall effect sensor + +tag: _Sensor_ _Angle_ Author: gzy + diff --git a/Sensors/xuan-zhuan-bian-ya-qi-resolver.md b/Sensors/xuan-zhuan-bian-ya-qi-resolver.md new file mode 100644 index 0000000..96233bf --- /dev/null +++ b/Sensors/xuan-zhuan-bian-ya-qi-resolver.md @@ -0,0 +1,4 @@ +# 旋转变压器 Resolver + +tag: _Sensor_ _Angle_ Author: gzy + diff --git a/Sensors/xue-xi-xing-miao-shu-zi.md b/Sensors/xue-xi-xing-miao-shu-zi.md new file mode 100644 index 0000000..35603b6 --- /dev/null +++ b/Sensors/xue-xi-xing-miao-shu-zi.md @@ -0,0 +1,23 @@ +# 学习型描述子 + +学习型描述子 + +目前深度学习的识别率/匹配率是绝对比传统的匹配算法如SIFT要高的,但是深度神经网络识别率的提高建立在需要大量训练样本的基础上,在一些没有训练样本的应用(image stitching/ stereo matching)仍然无法很方便的使用DNN来识别和匹配。但是未来会逐步侵占SIFT/SURF这种固定特征提取算法的生存空间。 + +* L2Net: progressive sampling strategy,relative distance between descriptors and extra supervision. CVPR 2017 +* HardNet: Working hard to know your neighbor's margins: Local descriptor learning loss. 
NIPS 2017 + +* DeepCD: learns a pair of complementary descriptors of binary and float. ICCV 2017 +* Spread-out: regularization term to maximize the spread in feature descriptor inspired by the property of uniform distribution. ICCV 2017 \( pairwise and triplet losses + regularization technique\) +* PPFNet: Global Context Aware Local Features for Robust 3D Point Matching. CVPR 2018 \(N-tuple loss, 3D point cloud\) +* End-to-End Learning of Keypoint Detector and Descriptor for Pose Invariant 3D Matching. CVPR 2018 \(depth image\) + +L2Net + +第一个工作是L2Net,输入是32\*32 patch,输出是128位浮点型描述子,它输出的描述子能够在欧氏空间采用L2范数进行匹配,因此命名为L2Net。 ![](../.gitbook/assets/l2net.png) 该方法的原创性体现在以下四个方面: + +> 采用了一个渐进的采样策略,使得网络在有限步内能够获得大量的样本; 针对块匹配问题,描述子对描述子间的相对距离给予更高的权重; 在中间的特征层施加了额外的监督; 描述子的压缩性也被纳入了考虑范围。 + +HardNet 第二个工作是HardNet,受到Lowe的SIFT的匹配标准启发,引入一种度量学习所用的loss(最大化一个batch中的最近正样本与最近负样本的距离),作为目标函数。将这个loss与L2Net的结构结合,构成了HardNet 。 ![](../.gitbook/assets/hardnet.png) 代码链接:[DagnyT/hardnet](https://link.zhihu.com/?target=https%3A//github.com/DagnyT/hardnet) + +PPFNet 第五个是PPFNet,目的是为3D点云生成理想且鲁棒的3D局部特征子。该方法基于深度学习方法来生成易区分且抗旋转的3D局部特征子,首先,将一些简单的几何特征属性如:点的坐标、法线以及点对特征(point pair features, PPF),组合起来成原始特征;接着,又设计了一个新的损失函数:N-tuple Loss。其类似于contrastive loss,能同时将多个同类或者不同类样本嵌入到一个欧式空间中,样本之间的差异用其特征向量的欧式距离表示。最后,PPFNet网络的结构继承自PointNet,因此它天生就可以处理点云以及应对点的无序性。 ![](../.gitbook/assets/ppfnet.png) + diff --git a/Software/Algorithms/Learning/Estimation/MaximumLikelihoodEstimate/README.md b/Software/Algorithms/Learning/Estimation/MaximumLikelihoodEstimate/README.md new file mode 100644 index 0000000..6929999 --- /dev/null +++ b/Software/Algorithms/Learning/Estimation/MaximumLikelihoodEstimate/README.md @@ -0,0 +1,2 @@ +# MaximumLikelihoodEstimate + diff --git a/Software/Algorithms/Learning/Estimation/README.md b/Software/Algorithms/Learning/Estimation/README.md new file mode 100644 index 0000000..cf827ec --- /dev/null +++ b/Software/Algorithms/Learning/Estimation/README.md @@ -0,0 
+1,2 @@ +# Estimation + diff --git a/Software/Algorithms/Learning/README.md b/Software/Algorithms/Learning/README.md new file mode 100644 index 0000000..c39c2a2 --- /dev/null +++ b/Software/Algorithms/Learning/README.md @@ -0,0 +1,2 @@ +# Learning + diff --git a/Software/Algorithms/Mobility/README.md b/Software/Algorithms/Mobility/README.md new file mode 100644 index 0000000..53794ae --- /dev/null +++ b/Software/Algorithms/Mobility/README.md @@ -0,0 +1,2 @@ +# Mobility + diff --git a/Software/Algorithms/Mobility/aerial-robotics/README.md b/Software/Algorithms/Mobility/aerial-robotics/README.md new file mode 100644 index 0000000..eb58d14 --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/README.md @@ -0,0 +1,2 @@ +# Aerial Robotics + diff --git a/Software/Algorithms/Mobility/aerial-robotics/control.md b/Software/Algorithms/Mobility/aerial-robotics/control.md new file mode 100644 index 0000000..70120de --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/control.md @@ -0,0 +1,4 @@ +# Control + + + diff --git a/Software/Algorithms/Mobility/aerial-robotics/geometry.md b/Software/Algorithms/Mobility/aerial-robotics/geometry.md new file mode 100644 index 0000000..de430e9 --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/geometry.md @@ -0,0 +1,4 @@ +# Geometry + + + diff --git a/Software/Algorithms/Mobility/aerial-robotics/mechanics.md b/Software/Algorithms/Mobility/aerial-robotics/mechanics.md new file mode 100644 index 0000000..ccdbb58 --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/mechanics.md @@ -0,0 +1,4 @@ +# Mechanics + + + diff --git a/Software/Algorithms/Mobility/aerial-robotics/planning.md b/Software/Algorithms/Mobility/aerial-robotics/planning.md new file mode 100644 index 0000000..d8a7fab --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/planning.md @@ -0,0 +1,4 @@ +# Planning + + + diff --git a/Software/Algorithms/Mobility/aerial-robotics/px4.md 
b/Software/Algorithms/Mobility/aerial-robotics/px4.md new file mode 100644 index 0000000..8350ec6 --- /dev/null +++ b/Software/Algorithms/Mobility/aerial-robotics/px4.md @@ -0,0 +1,4 @@ +# PX4 + + + diff --git a/Software/Algorithms/Perception/Basics/README.md b/Software/Algorithms/Perception/Basics/README.md new file mode 100644 index 0000000..2f3dac3 --- /dev/null +++ b/Software/Algorithms/Perception/Basics/README.md @@ -0,0 +1,2 @@ +# Basics + diff --git a/Software/Algorithms/Perception/README.md b/Software/Algorithms/Perception/README.md new file mode 100644 index 0000000..4fc1181 --- /dev/null +++ b/Software/Algorithms/Perception/README.md @@ -0,0 +1,2 @@ +# Perception + diff --git a/Software/Algorithms/Perception/Recognition/README.md b/Software/Algorithms/Perception/Recognition/README.md new file mode 100644 index 0000000..288a632 --- /dev/null +++ b/Software/Algorithms/Perception/Recognition/README.md @@ -0,0 +1,2 @@ +# Recognition + diff --git a/Software/Algorithms/Perception/localization-and-mapping/README.md b/Software/Algorithms/Perception/localization-and-mapping/README.md new file mode 100644 index 0000000..6a4a2fb --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/README.md @@ -0,0 +1,2 @@ +# Localization and Mapping + diff --git a/Software/Algorithms/Perception/localization-and-mapping/dataset/README.md b/Software/Algorithms/Perception/localization-and-mapping/dataset/README.md new file mode 100644 index 0000000..3c5437c --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/dataset/README.md @@ -0,0 +1,2 @@ +# Dataset + diff --git a/Software/Algorithms/Perception/localization-and-mapping/dataset/kitti.md b/Software/Algorithms/Perception/localization-and-mapping/dataset/kitti.md new file mode 100644 index 0000000..7b0ab95 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/dataset/kitti.md @@ -0,0 +1,6 @@ +# KITTI + +KITTI(Karlsruhe Institute of Technology and Toyota Technological 
Institute at Chicago)联合创建的鼎鼎有名的公开数据集项目,KITTI采集了大量优质的城市道路环境的公开数据集。 + +除了提供数据外,还分为odometry、sceneflow evaluation、tracking、road/lane detection等多项任务,欢迎全世界的研究者公布自己在该任务中的成绩,其成绩榜单上的排名基本代表了该项技术目前世界最高水平。 [网址](http://www.cvlibs.net/datasets/kitti/eval_object.php) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/dataset/tum.md b/Software/Algorithms/Perception/localization-and-mapping/dataset/tum.md new file mode 100644 index 0000000..afd910a --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/dataset/tum.md @@ -0,0 +1,4 @@ +# TUM + +德国慕尼黑工业大学(Technical University of Munich)的机器视觉小组也是鼎鼎有名的计算机视觉方向的研究团体。他们公布了大量室内外场景的数据集,种类繁多,不一而足,不像KITTI那样专心在城市道路场景中。 [网址](https://vision.in.tum.de/data/datasets) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/README.md new file mode 100644 index 0000000..debe395 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/README.md @@ -0,0 +1,2 @@ +# Principle + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/README.md new file mode 100644 index 0000000..2cd01c9 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/README.md @@ -0,0 +1,2 @@ +# localization + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/README.md new file mode 100644 index 0000000..fb021de --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/README.md @@ -0,0 +1,2 @@ +# Iterative Closest Point + diff --git 
a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/iterative-closest-point.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/iterative-closest-point.md new file mode 100644 index 0000000..c1cb4f3 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/iterative-closest-point/iterative-closest-point.md @@ -0,0 +1,4 @@ +# Iterative Closest Point + +这是一种在各种需要配准两个点云的应用中被广泛采用的算法。采用EM算法的思路,假定在获得一个较准确的先验位姿的前提下,寻找一个最优的R, T,使得x点云中每个点到y点云中离它最近的那个点的距离之和最小 ![Alt text](../../../../../../../.gitbook/assets/1546195833078.png) ![Alt text](../../../../../../../.gitbook/assets/1546195904221.png) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/README.md new file mode 100644 index 0000000..32a6f8a --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/README.md @@ -0,0 +1,2 @@ +# odometry + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/README.md new file mode 100644 index 0000000..184a829 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/README.md @@ -0,0 +1,2 @@ +# Odometry Modeling + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/odometry-modeling.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/odometry-modeling.md new file mode 100644 index 0000000..d3d7c75 --- /dev/null +++ 
b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/odometry-modeling/odometry-modeling.md @@ -0,0 +1,6 @@ +# Odometry Modeling + +里程计最初是指通过车辆轮胎上的编码器,计算车轮行驶的距离,并从内轮差中估算出转角的一种测量算法。由于其不可避免的随机误差(车辆颠簸、打滑、计数错误引起),通常需要和陀螺仪配合使用以提升精度,但陀螺仪长期也会有积分漂移(温度引起),所以单靠里程计本身并不足以解决定位问题(想象一个人蒙上眼睛,通过数步数在旷野里行走会不会迷路...)。 + +![转角测量](../../../../../../../../.gitbook/assets/1546196782206.png) ![移动测量](../../../../../../../../.gitbook/assets/1546196799209.png) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/visualodometry.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/visualodometry.md new file mode 100644 index 0000000..8737961 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/odometry/visualodometry.md @@ -0,0 +1,76 @@ +# VisualOdometry + +\[TOC\] + +## 1. 里程计 + +在机器人导航问题中,里程计是用从运动执行器所获取的数据来对机器人在经历一段时间之后位置的变化进行估计的一种测量设备或仪器。传统的里程计比如安装在小车机器人上的光电码盘,通过脉冲技术可以确定在一段时间内车轮转过的角度,进而推算出小车行驶过的距离乃至方向。虽然这种光电码盘里程计在许多轮式和轨道机动车上应用广泛,但是它不能使用在许多采取非标准机动方式的移动机器人(比如无人机和足式机器人)上。此外,传统的里程计还有严重的精度问题。由于轮式机动车的轮胎和路面有时会发生打滑,所以轮胎转过的角度和机动车实际驶过的距离之间有时并不一致,而在一些坎坷的路面上行驶的时候产生的误差来源则更为复杂。这些误差会在里程计工作的时候不断累积,随着工作的时间越长,里程计的读数也会变得越来越不可靠。 + +## 2. 
视觉里程计 + +随着摄像头的普及,人们发明了基于摄像头信息的一种新型里程计,称之为视觉里程计。视觉里程计通过分析处理摄像头所获取的相互关联的图像序列,来确定机器人移动的位置和朝向,从而进行导航。它具有比传统的基于光电码盘的里程计具有更好的导航精度,以及没有运动方式必须是行驶在平整路面上的轮式机器人(机动车)的限定,因此被应用在许多机器人应用中,比如漫游者火星探测器上就采用了视觉里程计\[1\]。此外在SLAM系统(Simultaneous Localization And Mapping)中,视觉里程计也是不可缺少的一部分。 + +经过数十年的发展,视觉里程计从硬件设备和软件算法两个层面出发,衍生出了多种分类。 + +## 2.1硬件设备层面分类 + +根据采用摄像头类型的不同,视觉里程计可以分为单目视觉里程计与立体视觉里程计。单目视觉里程计基于单个摄像头(称为单目),只能获取单目摄像头所处的三维空间投影到其成像平面上的二维影响。然后从二维影像中通过软件算法恢复出三维的运动信息,这对软件算法的设计和实际运行的实时性、精确性造成了一定的负担。而立体视觉里程计则倾向于将主要负担交给硬件设备,即采用立体摄像头(多目摄像头、深度摄像头、激光雷达都属于这一范畴),直接对摄像头所处的三维空间进行观测,观测得到的信息在计算机中以点云形式存储,然后通过软件算法从点云的时间序列中,估计出运动信息。这种方法虽然增加了获取到信息的丰富程度,在软件算法的设计和运动估计的准确度上比基于单目摄像头的视觉里程计具有一定优势,但是增加了硬件成本和功耗,对于一些对重力和电力载荷比较敏感的机器人应用场景,则不如单目视觉里程计合适。 + +## 2.2 软件算法层面分类 + +根据采用的硬件设备的不同,视觉里程计的软件算法也需要相应地设计。因此相应地产生了两种方法,基于特征点提取(Feature Based)的方法和直接法\(Direct Method\),即不提取特征点的方法。 + +### 2.2.1.1 特征点法 + +如何根据图像来估计相机运动是视觉里程计的核心。然而,图像在计算机内部是以一个包含亮度和色彩信息组成的矩阵形式存储的,如果直接从矩阵层面考虑运动估计,将会非常困难。所以,我们习惯于采用这样一种做法——首先,从图像中选取比较有代表性的点。这些点在相机视角发生少量变化后会保持不变,所以我们会在各个图像中找到相同的点。然后,在这些点的基础上,讨论相机位姿估计问题,以及这些点的定位问题。在经典 SLAM 模型中,把它们称为路标。而在视觉 SLAM 中,路标则是指图像特征(Features)。 + +根据维基百科中的定义,图像特征是一组与计算任务相关的信息,计算任务取决于具体的应用 \[2\]。简而言之,特征是图像信息的另一种数字表达形式。一组好的特征对于在指定任务上的最终表现至关重要,因此多年来研究者们花费了大量的精力对特征进行研究。数字图像在计算机中以灰度值矩阵的方式存储,所以最简单的,单个图像像素也是一种 “特征”。但是,在视觉里程计中,我们希望特征点在相机运动之后保持稳定,而灰度值受光照、形变、物体材质的影响严重,在不同图像之间变化非常大,不够稳定。理想的情况是,当场景和相机视角发生少量改变时,我们还能从图像中判断哪些地方是同一个点,因此仅凭灰度值是不够的,我们需要对图像提取特征点。 + +特征点是图像里一些特别的地方。我们可以把图像中的角点、边缘和区块都当成图像中有代表性的地方。然而,我们更容易精确地指出,某两幅图像当中出现了同一个角点;同一个边缘则稍微困难一些,因为沿着该边缘前进,图像局部是相似的; 同一个区块则是最困难的。可见,图像中的角点、边缘相比于像素区块而言更加“特别”,它们在不同图像之间的辨识度更强。所以,一种直观的提取特征的方式就是在不同图 像间辨认角点,确定它们的对应关系。在这种做法下,角点就是所谓的特征。 + +然而,在大多数应用中,单纯的角点依然不能满足很多我们的需求。例如,从远处看 上去是角点的地方,当相机走近之后,可能就不显示为角点了。或者,当旋转相机时,角点的外观会发生变化,我们也就不容易辨认出那是同一个角点。为此,计算机视觉领域的研究者在长期研究中,设计了许多更加稳定的局部图像特征,如著名的 SIFT\[3\], SURF\[4\], ORB\[5\] 等等。相比于朴素的角点,这些人工设计的特征点能够拥有如下的性质: + +1. 可重复性(Repeatability):相同的“区域”可以在不同的图像中被找到。 +2. 可区别性(Distinctiveness):不同的“区域”有不同的表达。 +3. 高效率(Efficiency):同一图像中,特征点的数量应远小于像素的数量。 +4. 
本地性(Locality):特征仅与一小片图像区域相关。 + +特征点由关键点(Key-point)和描述子(Descriptor)两部分组成。比方说,当我们 谈论 SIFT 特征时,是指“提取 SIFT 关键点,并计算 SIFT 描述子”两件事情。关键点是指该特征点在图像里的位置,有些特征点还具有朝向、大小等信息。描述子通常是一个向量,按照某种人为设计的方式,描述了该关键点周围像素的信息。描述子是按照“外观相似的特征应该有相似的描述子”的原则设计的。因此,只要两个特征点的描述子在向量空间上的距离相近,就可以认为它们是同样的特征点。 + +历史上,研究者提出过许多图像特征。它们有些很精确,在相机的运动和光照变 化下仍具有相似的表达,但相应地需要较大的计算量。 其中,SIFT\(尺度不变特征变换,Scale-Invariant Feature Transform\) 当属最为经典的一种。它充分考虑了在图像变换过程中 出现的光照,尺度,旋转等变化,但随之而来的是极大的计算量。由于整个 SLAM 过程中,图像特征的提取与匹配仅仅是诸多环节中的一个,到目前(2017 年)为止,普通 PC 的 CPU 还无法实时地计算 SIFT 特征。所以在 视觉里程计算法的设计中我们甚少使用 这种“奢侈”的图像特征。 + +而另一些特征, 则考虑适当降低精度和鲁棒性,提升计算的速度。 例如 FAST 关键点属于计算特别快的一种特征点(注意这里“关键点”的用词, 说明它没有描述子)。 而 ORB(Oriented FAST and Rotated BRIEF)特征则是目前看来非常具有代表性的实时图像特征。它改进了 FAST 检测子 \[6\] 不具有方向性的问题,并采用速度极快的二进制描述 子 BRIEF\[7\], 使整个图像特征提取的环节大大加速。 根据作者在论文中的测试,在同一幅图像中同时提取约 1000 个特征点的情况下, ORB 约要花费 15.3ms, SURF 约花费 217.3ms,SIFT 约花费 5228.7ms。由此可以看出 ORB 在保持了特征子具有旋转,尺度不 变性的同时,速度方面提升明显,对于实时性要求很高的 SLAM 来说是一个很好的选择。 + +大部分特征提取都具有较好的并行性,可以通过 GPU 等设备来加速计算。经过 GPU 加速后的 SIFT,就可以满足实时计算要求。但是,引入 GPU 将带来视觉里程计系统成本的提升。由此带来的性能提升,是否足以抵去付出的计算成本?这是个需要在系统设计时仔细考量的问题。在目前的视觉里程计方案中,ORB 是质量与性能之间较好的折中,其提取特征的整个过程可以在引文\[8\]中找到。 + +在提取出特征点后,下一步就是如何建立帧间特征点的关联,也就是特征匹配。特征匹配是视觉里程计中极为关键的一步,宽泛地说,特征匹配解决了视觉里程计中的数据关联问题(data association),即确定当前看到的路标与之前看到的路标之间的对应关系。通过对图像与图像,或者图像与地图之间的描述子进行准确的匹配,我们可以为后续的姿态估计,优化等操作减轻大量负担。然而,由于图像特征的局部特性,误匹配的情况广泛存在,而且长期以来一直没有得到有效解决,目前已经成为视觉里程计中制约性能提升的一大瓶颈。部分原因是因为场景中经常存在大量的重复纹理,使得特征描述非常相似。在这种情况下,仅利用局部特征解决误匹配是非常困难的。 + +不过,让我们先来看正确匹配的情况,再回头去讨论误匹配问题。考虑两个时刻的图像。如果在图像It中提取到特征点, 在图像It+1中提取到特征点 ,如何寻找这两个集合元素的对应关系呢?最简单的特征 匹配方法就是暴力匹配(Brute-Force Matcher)。即对每一个特征点 ,与所有的 测量描述子的距离,然后排序,取最近的一个作为匹配点。描述子距离表示了两个特征之 间的相似程度,不过在实际运用中还可以取不同的距离度量范数。对于浮点类型的描述子,使用欧氏距离进行度量即可。而对于二进制的描述子(比如 BRIEF 这样的),我们往往使用汉明距离(Hamming distance)做为度量\(即两个二进制串之间的不同位数的个数\)。 + +然而,当特征点数量很大时,暴力匹配法的运算量将变得很大,特别是当我们想要匹配一个帧和一张地图的时候。这不符合我们在 SLAM 中的实时性需求。此时快速近似最近邻(FLANN)算法更加适合于匹配点数量极多的情况。由于这些匹配算法理论已经成熟,而且实现上也已集成到开源视觉算法库OpenCV之中,其实现的技术细节可以在引文 \[5\] 中找到。 + +现在,假设通过以上的特征点提取与匹配过程,我们已经从两张图像中, 
得到了一对配对好的特征点。在得到了相邻两帧图像间的一对完成配对的特征点之后,可以利用匹配的特征点对之间存在的对极约束,采用直接线性变换法(Direct Linear Transform),求解出其本质矩阵E(Essential Matrix)和基础矩阵F(Fundamental Matrix),进而根据的关系得到帧间摄像头的旋转矩阵R和平移向量t。实际中只需要在相邻两帧图像之间找到5对匹配的特征点,即可求解出摄像头在拍摄第二张照片时的位姿相对于第一张照片拍摄时的旋转和平移运动信息。 但由于E 本身具有尺度等价性,它分解得到的 t, R 也有一个尺度等价性。而 R ∈ SO\(3\) 自身具有约束,所以我们认为 t 具有一个尺度。换言之,在分解过程 中,对 t 乘以任意非零常数,分解都是成立的。直接导致了单目视觉的尺度不确定性(Scale Ambiguity)。换言之,在单目视觉里程计中,对轨迹和地图同时缩放任意倍数,我们得到的图像依然是一样的。为了克服尺度不确定的问题,单目视觉里程计都不可避免地需要有一个初始化的过程。初始化的两张图像必须有一定程度的平移,而后的轨迹和地图都将以此步 的平移为单位。(除了对 t 进行归一化之外,另一种方法是令初始化时所有的特征点平均深度为 1,也可以固定一个尺度。相比于令 t 长度为 1 的做法,把特征点深度归一化可以控制场景的规模大小,使计算在数值上更稳定些。不过这并没有理论上的差别。) 另一方面,从 E 分解到 R, t 的过程中,如果相机发生的是纯旋转,导致 t 为零,那么,得到的 E 也将为零,这将导致我们无从求解 R。因此,单目视觉里程计除了必须要有初始化的过程外,还要求初始化不能只有纯旋转,必须要有一定程度的平移。如果没有平移,单目将无法初始化。这些都是通过匹配特征点之间的对极几何约束估计相机运动的局限之处。 + +在通过提取特征点、特征点匹配、基于对极几何估计相机运动之后,还可通过三角测量(Triangulation)的方法来估计像素特征点的深度信息。从而从2D的投影图像的信息中恢复出3D的空间点。在恢复出3D的空间点之后,可以通过PnP问题(Perspective-n-Point——即当我们知道 n 个 3D 空间点以及它们的投影位置时,如何估计相机所在的位姿的问题),利用P3P法、DLT法、EPnP法、UPnP法、以及通过非线性优化来最小化重投影误差的方法, 反过来求解3D到2D点对的运动。这些方法使得基于特征点提取的运动信息估计,在对极几何求解的基础上更加准确,同时也获得了对于特征点在三维空间中位置的估计。 + +### 2.2.1.2特征点法的优缺点 + +特征点法发展比较成熟,运行比较稳定,并且相较于直接法具有对光照、动态物体不敏感的有点,目前在视觉里程计中占据主流地位。 尽管如此,研究者们认为它至少有以下几个缺点: 1. 关键点的提取与描述子的计算非常耗时。实践当中,SIFT目前在CPU上是无法实时计算的,而ORB也需要近20毫秒的计算。如果整个SLAM以30毫秒/帧的速度运行,那么一大半时间都花在计算特征点上。 2. 使用特征点时,忽略了除特征点以外的所有信息。一张图像有几十万个像素,而特征点只有几百个。只使用特征点丢弃了大部分可能有用的图像信息。 3. 
相机有时会运动到特征缺失的地方,往往这些地方都没有什么明显的纹理信息。例如,有时我们会面对一堵白墙,或者一个空荡荡的走廓。这些场景下特征点数量会明显减少,我们可能找不到足够的匹配点来计算相机运动。 + +### 2.2.2.1直接法 + +近几年,随着深度摄像头和激光雷达等新型立体摄像头的兴起给视觉里程计的算法设计带来了新的思路,使人们有条件抛开特征点提取及其所引起的麻烦,另辟新路,基于光流提出了直接计算的方法,即根据图像的像素信息来计算相机运动,从而避免了特征的计算时间,也避免了特征缺失的情况。只要场景中存在明暗变化(可以是渐变,不形成局部的图像特征),直接法就能工作。 + +使用特征点法估计相机运动时,我们把特征点看作固定在三维空间的不动点。根据它们在相机中的投影位置,通过最小化重投影误差(Reprojection error)来优化相机运动。在这个过程中,我们需要精确地知道空间点在两个相机中投影后的像素位置——这也就是我们为何要对特征进行匹配或跟踪的理由。而在直接法中,最小化的不再是重投影误差,而是测量误差(Phometric error)。而根据使用图像中像素数量的不同,直接法可以分为稀疏、稠密和半稠密三种,具有恢复稠密结构的能力。相比于特征点法通常只能重构稀疏特征点,直接法和稠密重建有更紧密的联系。 + +#### 2.2.2.2直接法的优缺点: + +相比于特征点法,直接法具有以下优点: + +* 可以省去计算特征点、描述子的时间。 +* 利用了全部图像中的大部分信息。由于特征点法图像中提取的特征点信息都是稀疏的(换句话说,在图像中只有一些感兴趣的点被提取了出来,通常一幅图像中提取的信息点数目在50~500之间) 通过这一步预拣选的操作,许多有价值的信息都丢失了。 +* 只要求有像素梯度即可,无须特征点。因此,直接法可以在特征缺失的场合下使用(一个极端的例子是只有渐变的一张图像。它可能无法提取角点类特征,但可以用直接法估计它的运动)。 +* 可以构建半稠密乃至稠密的地图,这是特征点法无法做到的。 + +然而,直接法也受到传感器设备的限制,由于直接法需要以获得像素点深度为前提,所以无法在单目摄像头上使用,只能应用于深度摄像头或者激光雷达的场景。虽然计算复杂度比起特征点法显著降低了,但是对于硬件设备的要求提高了。 + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/optical-flow.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/optical-flow.md new file mode 100644 index 0000000..fc03081 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/optical-flow.md @@ -0,0 +1,4 @@ +# Optical Flow + +光流跟踪是一种根据图像灰度在时间轴上变化的连续性来估计速度的一种方法。 + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/localization/particle-filter.md b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/particle-filter.md new file mode 100644 index 0000000..f8f505d --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/localization/particle-filter.md @@ -0,0 +1,4 @@ +# Particle Filter + +这是Map Registration地图配准的最常用算法。基于采样和概率模型。先随机采样一些在均值附近的位姿,然后跟踪这些点随里程计的运动过程,在跟踪过程中进行优化、过滤,得出最可能的位姿。详情可参考这篇博客园的[博客](http://www.cnblogs.com/sanmenyi/p/7091978.html) + diff --git 
a/Software/Algorithms/Perception/localization-and-mapping/principle/loopclosing.md b/Software/Algorithms/Perception/localization-and-mapping/principle/loopclosing.md new file mode 100644 index 0000000..8f9a3f9 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/loopclosing.md @@ -0,0 +1,6 @@ +# LoopClosing + +如果在机器人导航过程中,重复经过同一个地点,如果SLAM系统能够检测到这一点,那么就有希望利用前后两次经过了同一地点这一信息,提升后端优化的精度,这一检测过程即为回环检测。 + +详细情况可参见高博的这篇博客: [https://www.cnblogs.com/gaoxiang12/p/4754948.html](https://www.cnblogs.com/gaoxiang12/p/4754948.html) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/3d-mapping.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/3d-mapping.md new file mode 100644 index 0000000..8843c79 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/3d-mapping.md @@ -0,0 +1,6 @@ +# 3D Mapping + +当数据维度从2维扩展到3维,Occupancy Grid Mapping 在信息存储上低效的缺点就充分暴露了出来: ![Alt text](../../../../../../../.gitbook/assets/1546192242493.png) 因此Occupancy Grid Map 的数据结构常用于2维地图的构建。 + +而列表法的表示虽然在信息存储上非常高效,但是搜索起来运算量太大: ![Alt text](../../../../../../../.gitbook/assets/1546192300405.png) 因此在3维中通常使用的是树结构,常用的树结构有两种:[KD树](https://en.wikipedia.org/wiki/K-d_tree)和[八叉树](https://en.wikipedia.org/wiki/Octree)。 ![](../../../../../../../.gitbook/assets/1546192397305.png) ![Alt text](../../../../../../../.gitbook/assets/1546192501079.png) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/README.md new file mode 100644 index 0000000..af1b712 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/3d-mapping/README.md @@ -0,0 +1,2 @@ +# 3D Mapping + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/README.md 
b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/README.md new file mode 100644 index 0000000..f4f4589 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/README.md @@ -0,0 +1,2 @@ +# mapping + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/README.md new file mode 100644 index 0000000..788163a --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/README.md @@ -0,0 +1,2 @@ +# Map Registration + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/map-registration.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/map-registration.md new file mode 100644 index 0000000..554388e --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/map-registration/map-registration.md @@ -0,0 +1,10 @@ +# Map Registration + +LIDAR depth sensor \(Light Detection and Ranging\), 通常嵌入在激光雷达中,提供一个二维或三维的点,可建模如下: ![Alt text](../../../../../../../.gitbook/assets/1546193899530.png) + +地图配准即是在已知一个全局地图的情况下,将LIDAR depth sensor的当前读数与之匹配,如下图所示: ![Alt text](../../../../../../../.gitbook/assets/1546193952850.png) + +![Alt text](../../../../../../../.gitbook/assets/1546193969018.png) ![Alt text](../../../../../../../.gitbook/assets/1546193984129.png) ![Alt text](../../../../../../../.gitbook/assets/1546193989361.png) + +$a-b$ + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/README.md new file mode 100644 index 0000000..f4f4589 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/README.md @@ -0,0 +1,2 
@@ +# mapping + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/mapping.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/mapping.md new file mode 100644 index 0000000..e69a074 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/mapping/mapping.md @@ -0,0 +1,6 @@ +# mapping + +地图可以分为以下三种 ![Alt text](../../../../../../../.gitbook/assets/1546190477979.png) 有测度的坐标系地图。 ![Alt text](../../../../../../../.gitbook/assets/1546190517286.png) 只存储拓扑结构的地图。 ![Alt text](../../../../../../../.gitbook/assets/1546190552329.png) 以及带有上层语义信息的地图。(需要人工标注或者上层强大的模式识别算法支持) + +在机器人学中,底层最常用的是第一种,metric map。 + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/README.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/README.md new file mode 100644 index 0000000..9cd4ec1 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/README.md @@ -0,0 +1,2 @@ +# Occupancy Grid Mapping + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/occupancy-grid-mapping.md b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/occupancy-grid-mapping.md new file mode 100644 index 0000000..180c27b --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/mapping/occupancy-grid-mapping/occupancy-grid-mapping.md @@ -0,0 +1,4 @@ +# Occupancy Grid Mapping + +一、栅格的定义 将连续空间离散化成一个个栅格。然后每个栅格用一个布尔随机变量来描述: ![Alt text](../../../../../../../.gitbook/assets/1546190828503.png) 二、传感器测量模型 激光传感器的一次测量可以确定一条线段上所有栅格是否被占用(不失一般性,假设是单线激光雷达),相应地赋一个概率值,总共有如下四种情况 ![Alt text](../../../../../../../.gitbook/assets/1546190853493.png) 三、代入贝叶斯公式 在(随机)先验的地图下,通过读取激光雷达传感器的测量数据,对地图用贝叶斯公式进行实时更新: ![Alt 
text](../../../../../../../.gitbook/assets/1546190996920.png) 四、Log-odd 定义odd运算,然后再取log,可以将贝叶斯公式中的相乘各项化为相加,达到变量分离方便迭代更新的目的: ![odd的定义](../../../../../../../.gitbook/assets/1546191110511.png) ![取对数](../../../../../../../.gitbook/assets/1546191222687.png) ![log odd更新](../../../../../../../.gitbook/assets/1546191243862.png) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/principle/optimization.md b/Software/Algorithms/Perception/localization-and-mapping/principle/optimization.md new file mode 100644 index 0000000..38ffbc4 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/principle/optimization.md @@ -0,0 +1,4 @@ +# Optimization + +优化方法在计算机领域的应用相当广泛,具体到机器人的建图问题中,主要是利用帧间信息提升前端估计的精度,主要有Bundle Adjustment集束调整,图优化,位姿图Pose Graph 和因子图几种。 详情可参见高翔博士的[后端1](https://blog.csdn.net/qq_23225073/article/details/78777311#2-ba与图优化)与[后端2](https://blog.csdn.net/qq_23225073/article/details/78844572)两篇博文 + diff --git a/Software/Algorithms/Perception/localization-and-mapping/project/README.md b/Software/Algorithms/Perception/localization-and-mapping/project/README.md new file mode 100644 index 0000000..635b4cb --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/project/README.md @@ -0,0 +1,2 @@ +# Project + diff --git a/Software/Algorithms/Perception/localization-and-mapping/project/dvo.md b/Software/Algorithms/Perception/localization-and-mapping/project/dvo.md new file mode 100644 index 0000000..f2d07f9 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/project/dvo.md @@ -0,0 +1,6 @@ +# DVO + +[DVO项目地址](https://github.com/tum-vision/dvo) + +后来TUM组还在DVO基础上加上了后端的实现,构成了[DVOslam项目](https://github.com/tum-vision/dvo_slam) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/project/lsd.md b/Software/Algorithms/Perception/localization-and-mapping/project/lsd.md new file mode 100644 index 0000000..1dfb8f5 --- /dev/null +++ 
b/Software/Algorithms/Perception/localization-and-mapping/project/lsd.md @@ -0,0 +1,4 @@ +# LSD + +[网址](https://github.com/tum-vision/lsd_slam) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/project/orb.md b/Software/Algorithms/Perception/localization-and-mapping/project/orb.md new file mode 100644 index 0000000..7f50136 --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/project/orb.md @@ -0,0 +1,4 @@ +# ORB + +可以说是目前综合性能最好的SLAM了,有单目、立体、RGB-D三种实现,代码风格也好,适合SLAM爱好者钻研。ORB是作者提出的一种特征点提取算子,具有很好的特性。[网址](https://github.com/raulmur/ORB_SLAM2) + diff --git a/Software/Algorithms/Perception/localization-and-mapping/project/svo.md b/Software/Algorithms/Perception/localization-and-mapping/project/svo.md new file mode 100644 index 0000000..c24b49b --- /dev/null +++ b/Software/Algorithms/Perception/localization-and-mapping/project/svo.md @@ -0,0 +1,4 @@ +# SVO + +这种视觉里程计是目前运算量最小的,比较适合放在计算资源有限的移动机器人平台上。采用直接法和最小化重投影误差。 [github项目地址](https://github.com/uzh-rpg/rpg_svo) [SVO原理解析-博客园](http://www.cnblogs.com/luyb/p/5773691.html) + diff --git a/Software/Algorithms/README.md b/Software/Algorithms/README.md new file mode 100644 index 0000000..4bf2867 --- /dev/null +++ b/Software/Algorithms/README.md @@ -0,0 +1,2 @@ +# Algorithms + diff --git a/Software/Algorithms/motion-planning/README.md b/Software/Algorithms/motion-planning/README.md new file mode 100644 index 0000000..060e546 --- /dev/null +++ b/Software/Algorithms/motion-planning/README.md @@ -0,0 +1,2 @@ +# Motion Planning + diff --git a/Software/Algorithms/motion-planning/moveit.md b/Software/Algorithms/motion-planning/moveit.md new file mode 100644 index 0000000..e53cb0f --- /dev/null +++ b/Software/Algorithms/motion-planning/moveit.md @@ -0,0 +1,4 @@ +# MoveIt + + + diff --git a/Software/Algorithms/motion-planning/probablistic-road-map/README.md b/Software/Algorithms/motion-planning/probablistic-road-map/README.md new file mode 100644 index 0000000..1f2b7c1 --- /dev/null +++ 
b/Software/Algorithms/motion-planning/probablistic-road-map/README.md @@ -0,0 +1,2 @@ +# Probablistic Road Map + diff --git a/Software/Algorithms/motion-planning/probablistic-road-map/probablistic-road-map.md b/Software/Algorithms/motion-planning/probablistic-road-map/probablistic-road-map.md new file mode 100644 index 0000000..91bb91b --- /dev/null +++ b/Software/Algorithms/motion-planning/probablistic-road-map/probablistic-road-map.md @@ -0,0 +1,13 @@ +# Probablistic Road Map + +伪代码: ![Alt text](../../../../.gitbook/assets/1546200547561.png) + +1. 生成随机图: + + ![Alt text](../../../../.gitbook/assets/1546200571261.png) + +2. 碰撞检测![Alt text](../../../../.gitbook/assets/1546200603529.png) +3. 用图规划问题的算法求解 + + ![Alt text](../../../../.gitbook/assets/1546200682136.png) + diff --git a/Software/Algorithms/motion-planning/rapid-exploring-random-trees/README.md b/Software/Algorithms/motion-planning/rapid-exploring-random-trees/README.md new file mode 100644 index 0000000..484d157 --- /dev/null +++ b/Software/Algorithms/motion-planning/rapid-exploring-random-trees/README.md @@ -0,0 +1,2 @@ +# Rapid Exploring Random Trees + diff --git a/Software/Algorithms/motion-planning/rapid-exploring-random-trees/rapid-exploring-random-trees.md b/Software/Algorithms/motion-planning/rapid-exploring-random-trees/rapid-exploring-random-trees.md new file mode 100644 index 0000000..0569936 --- /dev/null +++ b/Software/Algorithms/motion-planning/rapid-exploring-random-trees/rapid-exploring-random-trees.md @@ -0,0 +1,24 @@ +# Rapid Exploring Random Trees + +* Add start node to tree +* Repeat n times + * Generate a random configuration, x + * If x is in freespace using the CollisionCheck function + * Find y, the closest node in the tree to the random configuration + * If \(Dist \(x, y\) > delta\) – Check if x is too far from y + * Find a configuration, z, that is along the path from x to y such that Dist\(z,y\) <= delta + * x = z; + * If \(LocalPlanner \(x,y\)\) – Check if you can get from x to y 
Add x to the tree with y as its parent + +![Alt text](../../../../.gitbook/assets/1546201707049.png) + +* While not done + * Extend Tree A by adding a new node, x + * Find the closest node in Tree B to x, y + * If \(LocalPlanner\(x,y\)\) – Check if you can bridge the 2 trees + * Add edge between x and y. + * This completes a route between the root of Tree A and the root of Tree B. Return this route - -- + * Else + * Swap Tree A and Tree B + * ![Alt text](../../../../.gitbook/assets/1546201717793.png) + diff --git a/Software/Algorithms/motion-planning/sample-based-planners/README.md b/Software/Algorithms/motion-planning/sample-based-planners/README.md new file mode 100644 index 0000000..bab2891 --- /dev/null +++ b/Software/Algorithms/motion-planning/sample-based-planners/README.md @@ -0,0 +1,2 @@ +# Sample Based Planners + diff --git a/Software/Algorithms/motion-planning/sample-based-planners/characteristics-of-sample-based-planners.md b/Software/Algorithms/motion-planning/sample-based-planners/characteristics-of-sample-based-planners.md new file mode 100644 index 0000000..0f5b61e --- /dev/null +++ b/Software/Algorithms/motion-planning/sample-based-planners/characteristics-of-sample-based-planners.md @@ -0,0 +1,20 @@ +# Characteristics of Sample Based Planners + +There are a couple of characteristics of Random Sampling Based approaches that are worth noting. + +• First of all, while these methods work very well in practice they are not strictly speaking complete. + +• A complete path planning algorithm would find a path if one existed and report failure if it didn’t. + +With the PRM procedure it is possible to have a situation where the algorithm would fail to find a path even when one exists if the sampling procedure fails to generate an appropriate set of samples. 
+ +![Alt text](../../../../.gitbook/assets/1546201453749.png) ![Alt text](../../../../.gitbook/assets/1546201460892.png) + +What we can say is that if there is a route and the planner keeps adding random samples it will, eventually find a solution. + +• However it may take a long time to generate a sufficient number of samples. + +• A real advantage of these PRM based planners is that they can be applied to systems with lots of degrees of freedom as opposed to grid based sampling schemes which are typically restricted to problems in 2 or 3 dimensions. + +• In conclusion by relaxing the notion of completeness a bit and embracing the power of randomization these probablistic road map algorithms provide effective methods for planning routes that can be applied to a wide range of robotic systems including systems with many degrees of freedom. + diff --git a/Software/Algorithms/motion-planning/trapezoidal-decomposition/README.md b/Software/Algorithms/motion-planning/trapezoidal-decomposition/README.md new file mode 100644 index 0000000..b4ce642 --- /dev/null +++ b/Software/Algorithms/motion-planning/trapezoidal-decomposition/README.md @@ -0,0 +1,2 @@ +# Trapezoidal Decomposition + diff --git a/Software/Algorithms/motion-planning/trapezoidal-decomposition/trapezoidal-decomposition.md b/Software/Algorithms/motion-planning/trapezoidal-decomposition/trapezoidal-decomposition.md new file mode 100644 index 0000000..7361a96 --- /dev/null +++ b/Software/Algorithms/motion-planning/trapezoidal-decomposition/trapezoidal-decomposition.md @@ -0,0 +1,6 @@ +# Trapezoidal Decomposition + +处理Configuration Space的一种方法,将多边形图切割成一个个梯形 + +![Alt text](../../../../.gitbook/assets/1546201910730.png) + diff --git a/Software/Algorithms/motion-planning/visibility-graph/README.md b/Software/Algorithms/motion-planning/visibility-graph/README.md new file mode 100644 index 0000000..ec41837 --- /dev/null +++ b/Software/Algorithms/motion-planning/visibility-graph/README.md @@ -0,0 +1,2 @@ +# Visibility 
Graph + diff --git a/Software/Algorithms/motion-planning/visibility-graph/visibility-graph.md b/Software/Algorithms/motion-planning/visibility-graph/visibility-graph.md new file mode 100644 index 0000000..ab72762 --- /dev/null +++ b/Software/Algorithms/motion-planning/visibility-graph/visibility-graph.md @@ -0,0 +1,4 @@ +# Visibility Graph + +首先构建Visibility Graph: ![Alt text](../../../../.gitbook/assets/1546200430877.png) 然后随机取点并进行碰撞检测得到边: ![Alt text](../../../../.gitbook/assets/1546200410450.png) 最后规划出路径: ![Alt text](../../../../.gitbook/assets/1546200467850.png) + diff --git a/Software/Library/README.md b/Software/Library/README.md new file mode 100644 index 0000000..1b016ab --- /dev/null +++ b/Software/Library/README.md @@ -0,0 +1,2 @@ +# Library + diff --git a/Software/README.md b/Software/README.md new file mode 100644 index 0000000..2a4001e --- /dev/null +++ b/Software/README.md @@ -0,0 +1,2 @@ +# Software + diff --git a/Software/Simulator/README.md b/Software/Simulator/README.md new file mode 100644 index 0000000..cdaf20c --- /dev/null +++ b/Software/Simulator/README.md @@ -0,0 +1,2 @@ +# Simulator +