
MetaModel Metadata Model: A First Look

    • I. Introduction
      • 1. Overview
      • 2. Documentation
    • II. Integration
    • III. Usage Examples
      • 1. CsvTest
      • 2. JdbcTest
      • 3. JsonTest
      • 4. XmlTest

I. Introduction

1. Overview

Apache MetaModel is a library that models data sources with a SQL 99-compatible metamodel of schemas, tables and columns.
MetaModel provides a data-centric API for exploring, querying and updating data. Supported formats include relational databases (JDBC), CSV files, Excel spreadsheets, MongoDB, CouchDB and more, so you can write applications that are truly portable across storage systems.
In addition, the MetaModel API is easy to learn and use. It resembles SQL syntax but is type-safe; you can think of it as LINQ for Java.
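To give a feel for the query API before the full examples in section III, here is a minimal sketch. The file name people.csv and its name/age columns are made up for illustration; only the shape of the API calls matters here.

import java.io.File;
import java.util.Arrays;

import org.apache.metamodel.DataContext;
import org.apache.metamodel.DataContextFactory;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.schema.Table;

public class QuickStartSketch {
    public static void main(String[] args) {
        // Build a DataContext over a CSV file; the same query API works for JDBC, Excel, MongoDB, ...
        DataContext dataContext = DataContextFactory.createCsvDataContext(new File("people.csv"));
        Table table = dataContext.getDefaultSchema().getTables().get(0);
        // The query is composed from typed method calls instead of an SQL string
        try (DataSet rows = dataContext.query()
                .from(table)
                .select(table.getColumnByName("name"))
                .where(table.getColumnByName("age")).greaterThan(18)
                .execute()) {
            while (rows.next()) {
                System.out.println(Arrays.toString(rows.getRow().getValues()));
            }
        }
    }
}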

2. Documentation

The datastore support matrix below is taken from the METAMODEL WIKI:

| Datastore type | DataContext class name | Properties "type" value | Variants supported/available |
|---|---|---|---|
| JDBC / relational databases | JdbcDataContext | jdbc | PostgreSQL, MySQL, Microsoft SQL Server, Oracle DB, IBM DB2, Apache Hive, Apache Derby, SQLite, Ingres, H2, HSQLDB (... and many more) |
| CSV files | CsvDataContext | csv | Comma-separated, tab-separated, anything-separated files |
| Fixed width value files | FixedWidthDataContext | fixed-width | Fixed width, EBCDIC |
| Microsoft Excel spreadsheets | ExcelDataContext | excel | .xls, .xlsx |
| Microsoft Access database files | AccessDataContext | - | .mdb, .accdb |
| OpenOffice.org database files | OpenOfficeDataContext | - | .odb |
| XML files | XmlDomDataContext, XmlSaxDataContext | xml | DOM-based parsing (all in-memory), SAX-based parsing (streaming) |
| JSON files | JsonDataContext | json | |
| ElasticSearch | ElasticSearchDataContext, ElasticSearchRestDataContext | elasticsearch | Native Java API, RESTful JSON API |
| MongoDB | MongoDbDataContext | - | MongoDB 2.x, MongoDB 3.x |
| Apache CouchDB | CouchDbDataContext | couchdb | |
| Apache Cassandra | CassandraDataContext | cassandra | |
| Apache HBase | HBaseDataContext | hbase | |
| Apache Kafka | KafkaDataContext | | |
| Neo4j | Neo4jDataContext | - | |
| Salesforce.com | SalesforceDataContext | salesforce | |
| SugarCRM | SugarCrmDataContext | - | |
| Java collections | PojoDataContext | pojo | Collection, Collection<Map>, Collection |
| Collections of other datastores | CompositeDataContext | | |
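Each row of the table can be used in two ways: instantiate the DataContext class from the second column directly, or hand the "type" value from the third column to the properties-based factory. A sketch using the Excel row as an example (book.xlsx and its classpath location are made-up names; the resource string follows the same prefix convention as the CSV example in section III):

import java.io.File;

import org.apache.metamodel.DataContext;
import org.apache.metamodel.excel.ExcelDataContext;
import org.apache.metamodel.factory.DataContextFactoryRegistryImpl;
import org.apache.metamodel.factory.DataContextPropertiesImpl;

public class ExcelSketch {
    public static void main(String[] args) {
        // 1) Direct instantiation of the DataContext class from column two
        DataContext direct = new ExcelDataContext(new File("book.xlsx"));

        // 2) Properties-based factory, keyed by the "type" value from column three
        DataContextPropertiesImpl properties = new DataContextPropertiesImpl();
        properties.put(DataContextPropertiesImpl.PROPERTY_DATA_CONTEXT_TYPE, "excel");
        properties.put(DataContextPropertiesImpl.PROPERTY_RESOURCE_PROPERTIES, "classpath:/data/excel/book.xlsx");
        DataContext fromFactory =
                DataContextFactoryRegistryImpl.getDefaultInstance().createDataContext(properties);

        System.out.println(direct.getDefaultSchema().getTableNames());
        System.out.println(fromFactory.getDefaultSchema().getTableNames());
    }
}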

II. Integration

<dependency>
    <groupId>org.apache.metamodel</groupId>
    <artifactId>MetaModel-full</artifactId>
    <version>5.3.3</version>
</dependency>

III. Usage Examples

Source code on Gitee: https://gitee.com/ryou5416/meta-model.git

1. CsvTest

package com.example.metamodel;

import org.apache.metamodel.DataContext;
import org.apache.metamodel.UpdateableDataContext;
import org.apache.metamodel.csv.CsvConfiguration;
import org.apache.metamodel.csv.CsvDataContext;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.factory.DataContextFactoryRegistryImpl;
import org.apache.metamodel.factory.DataContextPropertiesImpl;
import org.apache.metamodel.factory.ResourceFactory;
import org.apache.metamodel.insert.InsertInto;
import org.apache.metamodel.query.CompiledQuery;
import org.apache.metamodel.query.Query;
import org.apache.metamodel.query.QueryParameter;
import org.apache.metamodel.schema.Schema;
import org.apache.metamodel.schema.Table;
import org.apache.metamodel.schema.TableType;
import org.apache.metamodel.util.AbstractResource;
import org.apache.metamodel.util.ClasspathResource;
import org.apache.metamodel.util.FileResource;
import org.apache.metamodel.util.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.util.Assert;
import org.springframework.util.ResourceUtils;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.List;

import static org.apache.metamodel.factory.DataContextPropertiesImpl.*;

/**
 * @author ouruyi
 * @version 1.0
 * @date Created in 2021/7/5 14:56
 * Description:
 */
public class CsvTest {

    @Test
    void test1() {
        /**
         * A table definition can also be supplied via {@link DataContextPropertiesImpl#PROPERTY_TABLE_DEFS}
         */
        final DataContextPropertiesImpl properties = new DataContextPropertiesImpl();
        properties.put(PROPERTY_DATA_CONTEXT_TYPE, "csv");
        properties.put(PROPERTY_COLUMN_NAME_LINE_NUMBER, 1);
        /**
         * Supported resource prefixes: file, classpath, mem, http, https, hdfs
         * @see ResourceFactory
         */
        properties.put(PROPERTY_RESOURCE_PROPERTIES, "classpath:/data/csv/data.csv");
        DataContext dataContext = DataContextFactoryRegistryImpl.getDefaultInstance().createDataContext(properties);
        final Schema schema = dataContext.getDefaultSchema();
        final List<Table> tables = schema.getTables(TableType.TABLE);
        final Table table = tables.get(0);
        DataSet result = dataContext.query().from(table).select(table.getColumns()).orderBy(table.getColumnByName("name")).execute();
        try {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        } finally {
            result.close();
        }
    }

    @Test
    void test2() {
        /**
         * @see AbstractResource
         */
        Resource resource = new ClasspathResource("/data/csv/data.csv");
        Assert.isTrue(resource.isExists(), "File does not exist!");
        /**
         * Custom column names can be supplied via {@link org.apache.metamodel.schema.naming.CustomColumnNamingStrategy#CustomColumnNamingStrategy(java.lang.String...)}
         */
        CsvConfiguration configuration = new CsvConfiguration(1, "UTF-8", ',', '"', '\\');
        DataContext dataContext = new CsvDataContext(resource, configuration);
        final Schema schema = dataContext.getDefaultSchema();
        final List<Table> tables = schema.getTables(TableType.TABLE);
        final Table table = tables.get(0);
        DataSet result = dataContext.query().from(table).select(table.getColumns()).orderBy(table.getColumnByName("name")).execute();
        try {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        } finally {
            result.close();
        }
    }

    /**
     * Bind variables: fewer parsing passes, better performance and security
     */
    @Test
    void test3() {
        /**
         * @see AbstractResource
         */
        Resource resource = new ClasspathResource("/data/csv/data.csv");
        Assert.isTrue(resource.isExists(), "File does not exist!");
        /**
         * Custom column names can be supplied via {@link org.apache.metamodel.schema.naming.CustomColumnNamingStrategy#CustomColumnNamingStrategy(java.lang.String...)}
         */
        CsvConfiguration configuration = new CsvConfiguration(1, "UTF-8", ',', '"', '\\');
        DataContext dataContext = new CsvDataContext(resource, configuration);
        final Schema schema = dataContext.getDefaultSchema();
        final Table table = schema.getTableByName("default_table");
        final Query query = dataContext
                .query()
                .from(table)
                .select(table.getColumns())
                .where("name").eq(new QueryParameter())
                .orderBy(table.getColumnByName("name"))
                .toQuery();
        /**
         * Prints: SELECT default_table.age FROM csv.default_table WHERE data.csv.name = ? ORDER BY data.csv.name ASC
         * data.csv      = table name
         * csv           = schema
         * default_table = alias
         */
        System.out.println(query);
        // Pre-compile, similar to a JDBC PreparedStatement; JdbcDataContext really does use prepareStatement internally (adapter pattern)
        CompiledQuery compiledQuery = dataContext.compileQuery(query);
        try (DataSet result = dataContext.executeQuery(compiledQuery, "jan")) {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        }
    }

    /**
     * Updating data
     * Note: ClasspathResource is read-only by default (not recommended for updates)
     */
    @Test
    void update() throws FileNotFoundException {
        final File file = ResourceUtils.getFile("classpath:data/csv/data.csv");
        Resource resource = new FileResource(file);
        Assert.isTrue(resource.isExists(), "File does not exist!");
        CsvConfiguration configuration = new CsvConfiguration(1, "UTF-8", ',', '"', '\\');
        UpdateableDataContext dataContext = new CsvDataContext(resource, configuration);
        final Schema schema = dataContext.getDefaultSchema();
        final Table table = schema.getTableByName("data.csv");
        // Writes to target/classes/data/csv/data.csv
        dataContext.executeUpdate(new InsertInto(table).value("name", "Polly the Sheep").value("age", 10));
        dataContext.executeUpdate(callback -> {
            callback.insertInto(table).value("name", "Polly the Sheep").value("age", 10).execute();
            callback.update(table).where("name").eq("Polly the Sheep").value("age", 20).execute();
            callback.deleteFrom(table).where("name").eq("Polly the Sheep").execute();
        });
        /**
         * https://cwiki.apache.org/confluence/display/METAMODEL/UpdateableDataContext
         * Does the delete below have to be issued in its own executeUpdate for the record to actually be removed?
         */
        dataContext.executeUpdate(callback -> callback.deleteFrom(table).where("name").eq("Polly the Sheep").execute());
    }
}
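For reference, the tests above expect a small CSV file on the classpath at data/csv/data.csv whose first line holds the column names. The original article does not include the file; made-up contents like the following would satisfy the queries (including a row named "jan" for the bind-variable test):

name,age
jan,30
maria,25
polly,7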

2. JdbcTest

package com.example.metamodel;

import org.apache.metamodel.DataContext;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.factory.DataContextFactoryRegistryImpl;
import org.apache.metamodel.factory.DataContextPropertiesImpl;
import org.apache.metamodel.jdbc.JdbcDataContext;
import org.apache.metamodel.query.Query;
import org.apache.metamodel.schema.Schema;
import org.apache.metamodel.schema.Table;
import org.apache.metamodel.schema.TableType;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.env.Environment;

import javax.sql.DataSource;
import java.util.Arrays;

import static org.apache.metamodel.factory.DataContextPropertiesImpl.*;

/**
 * @author ouruyi
 * @version 1.0
 * @date Created in 2021/7/5 14:42
 * Description:
 */
@SpringBootTest
public class JdbcTest {

    @javax.annotation.Resource
    Environment environment;

    @javax.annotation.Resource
    DataSource dataSource;

    @Test
    void test1() {
        final DataContextPropertiesImpl properties = new DataContextPropertiesImpl();
        properties.put(PROPERTY_DATA_CONTEXT_TYPE, "jdbc");
        properties.put(PROPERTY_URL, environment.getProperty("spring.datasource.url"));
        properties.put(PROPERTY_USERNAME, environment.getProperty("spring.datasource.username"));
        properties.put(PROPERTY_PASSWORD, environment.getProperty("spring.datasource.password"));
        properties.put(PROPERTY_TABLE_TYPES, new TableType[]{TableType.TABLE, TableType.VIEW});
        DataContext dataContext = DataContextFactoryRegistryImpl.getDefaultInstance().createDataContext(properties);
        final Schema schema = dataContext.getDefaultSchema();
        final Table user = schema.getTableByName("USER");
        final Query query = dataContext.query().from(user).select(user.getColumns()).toQuery();
        try (final DataSet result = dataContext.executeQuery(query)) {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        }
    }

    @Test
    void test2() {
        final JdbcDataContext dataContext = new JdbcDataContext(dataSource);
        final Schema schema = dataContext.getDefaultSchema();
        final Table user = schema.getTableByName("USER");
        final Query query = dataContext.query().from(user).select(user.getColumns()).toQuery();
        try (final DataSet result = dataContext.executeQuery(query)) {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        }
    }
}
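JdbcDataContext also implements UpdateableDataContext, so the executeUpdate callback API shown in CsvTest works unchanged against the database. Below is a sketch of a further test method that could be added to the JdbcTest class above (no additional imports are needed); the NAME and AGE column names of the USER table are assumptions made for illustration, not part of the original example.

    @Test
    void update() {
        final JdbcDataContext dataContext = new JdbcDataContext(dataSource);
        final Table user = dataContext.getDefaultSchema().getTableByName("USER");
        // Same fluent update API as in CsvTest; for JDBC it is translated into SQL statements
        dataContext.executeUpdate(callback -> {
            callback.insertInto(user).value("NAME", "Polly the Sheep").value("AGE", 10).execute();   // assumed column names
            callback.update(user).where("NAME").eq("Polly the Sheep").value("AGE", 20).execute();
            callback.deleteFrom(user).where("NAME").eq("Polly the Sheep").execute();
        });
    }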

3. JsonTest

package com.example.metamodel;

import cn.hutool.core.io.FileUtil;
import org.apache.metamodel.DataContext;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.factory.DataContextFactoryRegistryImpl;
import org.apache.metamodel.factory.DataContextPropertiesImpl;
import org.apache.metamodel.json.JsonDataContext;
import org.apache.metamodel.schema.Schema;
import org.apache.metamodel.schema.Table;
import org.apache.metamodel.schema.TableType;
import org.apache.metamodel.util.ClasspathResource;
import org.apache.metamodel.util.InMemoryResource;
import org.apache.metamodel.util.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.util.Assert;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;

import static org.apache.metamodel.factory.DataContextPropertiesImpl.PROPERTY_DATA_CONTEXT_TYPE;
import static org.apache.metamodel.factory.DataContextPropertiesImpl.PROPERTY_RESOURCE_PROPERTIES;

/**
 * @author ouruyi
 * @version 1.0
 * @date Created in 2021/7/5 14:42
 * Description:
 */
public class JsonTest {

    @Test
    void test1() {
        final DataContextPropertiesImpl properties = new DataContextPropertiesImpl();
        properties.put(PROPERTY_DATA_CONTEXT_TYPE, "json");
        properties.put(PROPERTY_RESOURCE_PROPERTIES, "classpath:/data/json/data.json");
        DataContext dataContext = DataContextFactoryRegistryImpl.getDefaultInstance().createDataContext(properties);
        final Schema schema = dataContext.getDefaultSchema();
        final List<Table> tables = schema.getTables(TableType.TABLE);
        final Table table = tables.get(0);
        DataSet result = dataContext.query().from(table).select(table.getColumns()).orderBy(table.getColumnByName("name")).execute();
        try {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        } finally {
            result.close();
        }
    }

    @Test
    void test2() {
        Resource resource = new ClasspathResource("/data/json/data.json");
        Assert.isTrue(resource.isExists(), "File does not exist!");
        final JsonDataContext dataContext = new JsonDataContext(resource);
        final Schema schema = dataContext.getDefaultSchema();
        final List<Table> tables = schema.getTables(TableType.TABLE);
        final Table table = tables.get(0);
        DataSet result = dataContext.query().from(table).select(table.getColumns()).orderBy(table.getColumnByName("name")).execute();
        try {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        } finally {
            result.close();
        }
    }

    @Test
    void test3() throws IOException {
        byte[] bytes = FileUtil.readBytes("data/json/data.json");
        Resource resource = new InMemoryResource("/data/json/data.json");
        try (final OutputStream outputStream = resource.write()) {
            outputStream.write(bytes);
            /**
             * Be sure to call flush() here
             * @see org.apache.metamodel.util.InMemoryResource#createOutputStream(boolean)
             */
            outputStream.flush();
        }
        final JsonDataContext dataContext = new JsonDataContext(resource);
        final Schema schema = dataContext.getDefaultSchema();
        final List<Table> tables = schema.getTables(TableType.TABLE);
        final Table table = tables.get(0);
        System.out.println(table.getColumns());
        DataSet result = dataContext.query().from(table).select(table.getColumns()).execute();
        try {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        } finally {
            result.close();
        }
    }
}
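The JSON tests read a classpath file at data/json/data.json; JsonDataContext maps each top-level object to a row and each of its fields to a column. The original article does not include the file, but made-up contents along these lines would work with the queries above:

[
  {"name": "jan", "age": 30},
  {"name": "maria", "age": 25}
]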

4. XmlTest

package com.example.metamodel;

import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.query.Query;
import org.apache.metamodel.schema.Column;
import org.apache.metamodel.schema.Schema;
import org.apache.metamodel.schema.Table;
import org.apache.metamodel.util.ClasspathResource;
import org.apache.metamodel.util.Resource;
import org.apache.metamodel.xml.XmlSaxDataContext;
import org.apache.metamodel.xml.XmlSaxTableDef;
import org.junit.jupiter.api.Test;
import org.springframework.util.Assert;

import java.util.Arrays;

/**
 * @author ouruyi
 * @version 1.0
 * @date Created in 2021/7/5 14:42
 * Description: XmlSaxDataContext is recommended; the project no longer recommends XmlDomDataContext
 */
public class XmlTest {

    @Test
    void test1() {
    }

    @Test
    void test2() {
        Resource resource = new ClasspathResource("/data/xml/data.xml");
        Assert.isTrue(resource.isExists(), "File does not exist!");
        XmlSaxTableDef employeeTableDef = new XmlSaxTableDef(
                "/root/organization/employees/employee",
                new String[] {
                        "/root/organization/employees/employee/name",
                        "/root/organization/employees/employee/gender",
                        "index(/root/organization)"
                }
        );
        XmlSaxTableDef organizationTableDef = new XmlSaxTableDef(
                "/root/organization",
                new String[] {
                        "/root/organization/name",
                        "/root/organization@type"
                }
        );
        final XmlSaxDataContext dataContext = new XmlSaxDataContext(resource, employeeTableDef, organizationTableDef);
        final Schema schema = dataContext.getDefaultSchema();
        System.out.println(schema.getTableNames());
        final Table organizationTable = schema.getTableByName("/organization");
        final Table employeeTable = schema.getTableByName("/employee");
        Column fk = employeeTable.getColumnByName("index(/root/organization)");
        Column empName = employeeTable.getColumnByName("/name");
        Column gender = employeeTable.getColumnByName("/gender");
        Column orgId = organizationTable.getColumnByName("row_id");
        Column orgName = organizationTable.getColumnByName("/name");
        Query q = dataContext.query().from(employeeTable)
                .innerJoin(organizationTable).on(fk, orgId)
                .select(empName).as("employee")
                .select(gender).as("gender")
                .select(orgName).as("company").toQuery();
        try (DataSet result = dataContext.executeQuery(q)) {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        }
    }

    @Test
    void test3() {
        Resource resource = new ClasspathResource("/data/xml/data.xml");
        Assert.isTrue(resource.isExists(), "File does not exist!");
        XmlSaxTableDef tableDef = new XmlSaxTableDef(
                "/root/organization/employees/employee",
                new String[] {
                        "/root/organization/employees/employee/name",
                        "/root/organization/employees/employee/gender"
                }
        );
        final XmlSaxDataContext dataContext = new XmlSaxDataContext(resource, tableDef);
        final Schema schema = dataContext.getDefaultSchema();
        final Table table = schema.getTableByName("/employee");
        Query q = dataContext.query().from(table)
                .select(table.getColumns()).toQuery();
        try (DataSet result = dataContext.executeQuery(q)) {
            while (result.next()) {
                System.out.println("Row: " + Arrays.toString(result.getRow().getValues()));
            }
        }
    }
}
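The XmlSaxTableDef paths above only make sense against a concrete document layout. The original article does not show data/xml/data.xml; a made-up document matching those paths (including the organization "type" attribute and the employee name/gender elements) would look like this:

<root>
    <organization type="company">
        <name>Acme</name>
        <employees>
            <employee>
                <name>jan</name>
                <gender>male</gender>
            </employee>
            <employee>
                <name>maria</name>
                <gender>female</gender>
            </employee>
        </employees>
    </organization>
</root>

With the two table definitions registered, XmlSaxDataContext exposes the virtual tables /organization and /employee, and the index(/root/organization) column acts as a foreign key to the organization's row_id column, which is what the inner join in test2 relies on.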