Support
Quality
Security
License
Reuse
kandi has reviewed maxwell and discovered the below as its top functions. This is intended to give you an instant insight into maxwell's implemented functionality, and help you decide whether it suits your requirements.
Maxwell's daemon, a mysql-to-json kafka producer
default
mysql> insert into `test`.`maxwell` set id = 1, daemon = 'Stanislaw Lem';
maxwell: {
"database": "test",
"table": "maxwell",
"type": "insert",
"ts": 1449786310,
"xid": 940752,
"commit": true,
"data": { "id":1, "daemon": "Stanislaw Lem" }
}
Could not find any factory for identifier 'avro-confluent' that implements 'org.apache.flink.table.factories.DeserializationFormatFactory'
// sbt-assembly merge strategy for building the fat jar.
// The two META-INF/services cases (previously dangling below the block,
// which does not compile) are folded in ABOVE the `case _` catch-all:
// ServiceLoader registration files must be CONCATENATED, not first-wins,
// or Flink loses factory registrations (e.g. 'avro-confluent') from all
// but one jar and fails with "Could not find any factory for identifier".
lazy val mergeStrategy = Seq(
  assembly / assemblyMergeStrategy := {
    // Typesafe-config files: merge settings from every jar.
    case "application.conf" => MergeStrategy.concat
    case "reference.conf" => MergeStrategy.concat
    // Keep every jar's Flink factory registrations discoverable.
    case "META-INF/services/org.apache.flink.table.factories.Factory" => MergeStrategy.concat
    case "META-INF/services/org.apache.flink.table.factories.TableFactory" => MergeStrategy.concat
    // Signature/manifest files would be invalid in a repacked jar.
    case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
    case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
    // Anything else: keep the first occurrence.
    case _ => MergeStrategy.first
  }
)
-----------------------
// sbt-assembly merge strategy for building the fat jar.
// The two META-INF/services cases (previously dangling below the block,
// which does not compile) are folded in ABOVE the `case _` catch-all:
// ServiceLoader registration files must be CONCATENATED, not first-wins,
// or Flink loses factory registrations (e.g. 'avro-confluent') from all
// but one jar and fails with "Could not find any factory for identifier".
lazy val mergeStrategy = Seq(
  assembly / assemblyMergeStrategy := {
    // Typesafe-config files: merge settings from every jar.
    case "application.conf" => MergeStrategy.concat
    case "reference.conf" => MergeStrategy.concat
    // Keep every jar's Flink factory registrations discoverable.
    case "META-INF/services/org.apache.flink.table.factories.Factory" => MergeStrategy.concat
    case "META-INF/services/org.apache.flink.table.factories.TableFactory" => MergeStrategy.concat
    // Signature/manifest files would be invalid in a repacked jar.
    case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
    case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
    // Anything else: keep the first occurrence.
    case _ => MergeStrategy.first
  }
)
Get the proper listbox item when using trace
def sauthor_list(self):
    """Print the listbox selection, then the text of the first selected item.

    Guards against an empty selection: ``curselection()`` returns an
    empty tuple when nothing is selected, so indexing ``[0]`` on it
    (as the original did) raised IndexError.
    """
    selection = frame1_lb.curselection()  # tuple of selected indices
    print(selection)
    if not selection:  # nothing selected -> nothing to fetch
        return
    selected = frame1_lb.get(selection[0])  # text of first selected item
    print(selected)
Insert colA into DF1 with vals from DF2['colB'] by matching colC in both DFs
import pandas as pd

# Stock master data, keyed by StockID.
dfa = pd.read_csv('csv_a.csv')
dfa.set_index('StockID', inplace=True)

dfb = pd.read_csv('csv_b.csv')
# Drop incomplete rows: rows without a StockID cannot be matched.
dfb_tmp = dfb[dfb['StockID'].notnull()]

# Map each StockID (dfa's index) to its Category via a lookup dict.
# Same result as the original row-wise apply + linear scan, but O(n)
# instead of O(n^2); unmatched IDs become NaN, matching the original's
# implicit None. Assumes StockID is unique in csv_b (with duplicates a
# dict keeps the LAST, where the original took the first match).
dfa['Cat'] = dfa.index.map(dict(zip(dfb_tmp['StockID'], dfb_tmp['Category'])))
print(dfa)
StockID Brand ToolName Price Cat
ABC123 Maxwell ToolA 1.25 CatThis
BCD234 Charton ToolB 2.22 CatShop
CDE345 Bingley ToolC 3.33 CatThings
DEF789 Charton ToolD 1.44 CatShop
-----------------------
import pandas as pd

# Stock master data, keyed by StockID.
dfa = pd.read_csv('csv_a.csv')
dfa.set_index('StockID', inplace=True)

dfb = pd.read_csv('csv_b.csv')
# Drop incomplete rows: rows without a StockID cannot be matched.
dfb_tmp = dfb[dfb['StockID'].notnull()]

# Map each StockID (dfa's index) to its Category via a lookup dict.
# Same result as the original row-wise apply + linear scan, but O(n)
# instead of O(n^2); unmatched IDs become NaN, matching the original's
# implicit None. Assumes StockID is unique in csv_b (with duplicates a
# dict keeps the LAST, where the original took the first match).
dfa['Cat'] = dfa.index.map(dict(zip(dfb_tmp['StockID'], dfb_tmp['Category'])))
print(dfa)
StockID Brand ToolName Price Cat
ABC123 Maxwell ToolA 1.25 CatThis
BCD234 Charton ToolB 2.22 CatShop
CDE345 Bingley ToolC 3.33 CatThings
DEF789 Charton ToolD 1.44 CatShop
-----------------------
df_a['Cat'] = df_a['StockID'].map(dict(zip(df_b['StockID'], df_b['Category'])))
df_a['Cat'] = df_a.index.map(dict(zip(df_b['StockID'], df_b['Category'])))
^^^^^
-----------------------
df_a['Cat'] = df_a['StockID'].map(dict(zip(df_b['StockID'], df_b['Category'])))
df_a['Cat'] = df_a.index.map(dict(zip(df_b['StockID'], df_b['Category'])))
^^^^^
No any output from hello world of node.js redis
(async () => {
  // createClient() with no options targets the default localhost:6379.
  const client = createClient();
  client.on('error', (err) => console.log('Redis Client Error', err));
  await client.connect();
  await client.set('key', 'value');
  const value = await client.get('key');
  // Fix for "no output": the original fetched `value` but never printed
  // it, so the script completed silently. Log it before quitting.
  console.log(value);
  await client.quit();
})();
Plotting Maxwellian Distribution in Julia
julia> (m_e/(2*pi*k*T_M))^1.5
1.0769341115495682e-27
julia> ylims!(-1e-28, 2e-27)
-----------------------
julia> (m_e/(2*pi*k*T_M))^1.5
1.0769341115495682e-27
julia> ylims!(-1e-28, 2e-27)
How do i populate three sections in a tableview with SwiftyJSON
// One table-view section: a display name plus the users it contains.
// Initialized directly from a Dictionary (key, value) element tuple.
struct Section {
let name : String
let users : [User]
}
// Decodable model matching one user object in the JSON payload.
struct User : Decodable {
let name, password, username, authority : String
let id : Int
}
// Data source backing the table view; rebuilt after decoding.
var sections = [Section]()
// Sample payload; `authority` is the grouping key used below.
let jsonString = """
[{
"name" : "Oliver",
"password" : "1234",
"username" : "Ramy",
"id" : 84560,
"authority" : "Manager"
}, {
"name" : "Maxwell",
"password" : "1234",
"username" : "Omar",
"id" : 84561,
"authority" : "Accountant"
}, {
"name" : "Tom",
"password" : "1234",
"username" : "Ahmed",
"id" : 84562,
"authority" : "Accountant"
}]
"""
// Decode the array, bucket users by authority, then turn each
// (authority, [User]) pair into a Section via Section.init.
// NOTE(review): Dictionary ordering is unspecified — sort the result
// if a stable section order is required.
do {
let users = try JSONDecoder().decode([User].self, from: Data(jsonString.utf8))
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
print(sections)
} catch {
print(error)
}
// One table section per authority group.
func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
// Rows in a section = users in that group.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sections[section].users.count
}
// Dequeue a cell and show the user's id and name.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "usersTVC", for: indexPath)
let user = sections[indexPath.section].users[indexPath.row]
cell.textLabel?.text = "ID: \(user.id) - \(user.name)"
return cell
}
AF.request(url, method: .post, parameters: data, headers: headers).responseDecodable(of: [User].self, decoder: JSONDecoder()) { response in
switch response.result {
case .success(let users):
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
case .failure(let error): print(error)
-----------------------
// One table-view section: a display name plus the users it contains.
// Initialized directly from a Dictionary (key, value) element tuple.
struct Section {
let name : String
let users : [User]
}
// Decodable model matching one user object in the JSON payload.
struct User : Decodable {
let name, password, username, authority : String
let id : Int
}
// Data source backing the table view; rebuilt after decoding.
var sections = [Section]()
// Sample payload; `authority` is the grouping key used below.
let jsonString = """
[{
"name" : "Oliver",
"password" : "1234",
"username" : "Ramy",
"id" : 84560,
"authority" : "Manager"
}, {
"name" : "Maxwell",
"password" : "1234",
"username" : "Omar",
"id" : 84561,
"authority" : "Accountant"
}, {
"name" : "Tom",
"password" : "1234",
"username" : "Ahmed",
"id" : 84562,
"authority" : "Accountant"
}]
"""
// Decode the array, bucket users by authority, then turn each
// (authority, [User]) pair into a Section via Section.init.
// NOTE(review): Dictionary ordering is unspecified — sort the result
// if a stable section order is required.
do {
let users = try JSONDecoder().decode([User].self, from: Data(jsonString.utf8))
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
print(sections)
} catch {
print(error)
}
// One table section per authority group.
func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
// Rows in a section = users in that group.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sections[section].users.count
}
// Dequeue a cell and show the user's id and name.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "usersTVC", for: indexPath)
let user = sections[indexPath.section].users[indexPath.row]
cell.textLabel?.text = "ID: \(user.id) - \(user.name)"
return cell
}
AF.request(url, method: .post, parameters: data, headers: headers).responseDecodable(of: [User].self, decoder: JSONDecoder()) { response in
switch response.result {
case .success(let users):
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
case .failure(let error): print(error)
-----------------------
// One table-view section: a display name plus the users it contains.
// Initialized directly from a Dictionary (key, value) element tuple.
struct Section {
let name : String
let users : [User]
}
// Decodable model matching one user object in the JSON payload.
struct User : Decodable {
let name, password, username, authority : String
let id : Int
}
// Data source backing the table view; rebuilt after decoding.
var sections = [Section]()
// Sample payload; `authority` is the grouping key used below.
let jsonString = """
[{
"name" : "Oliver",
"password" : "1234",
"username" : "Ramy",
"id" : 84560,
"authority" : "Manager"
}, {
"name" : "Maxwell",
"password" : "1234",
"username" : "Omar",
"id" : 84561,
"authority" : "Accountant"
}, {
"name" : "Tom",
"password" : "1234",
"username" : "Ahmed",
"id" : 84562,
"authority" : "Accountant"
}]
"""
// Decode the array, bucket users by authority, then turn each
// (authority, [User]) pair into a Section via Section.init.
// NOTE(review): Dictionary ordering is unspecified — sort the result
// if a stable section order is required.
do {
let users = try JSONDecoder().decode([User].self, from: Data(jsonString.utf8))
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
print(sections)
} catch {
print(error)
}
// One table section per authority group.
func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
// Rows in a section = users in that group.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sections[section].users.count
}
// Dequeue a cell and show the user's id and name.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "usersTVC", for: indexPath)
let user = sections[indexPath.section].users[indexPath.row]
cell.textLabel?.text = "ID: \(user.id) - \(user.name)"
return cell
}
AF.request(url, method: .post, parameters: data, headers: headers).responseDecodable(of: [User].self, decoder: JSONDecoder()) { response in
switch response.result {
case .success(let users):
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
case .failure(let error): print(error)
-----------------------
// One table-view section: a display name plus the users it contains.
// Initialized directly from a Dictionary (key, value) element tuple.
struct Section {
let name : String
let users : [User]
}
// Decodable model matching one user object in the JSON payload.
struct User : Decodable {
let name, password, username, authority : String
let id : Int
}
// Data source backing the table view; rebuilt after decoding.
var sections = [Section]()
// Sample payload; `authority` is the grouping key used below.
let jsonString = """
[{
"name" : "Oliver",
"password" : "1234",
"username" : "Ramy",
"id" : 84560,
"authority" : "Manager"
}, {
"name" : "Maxwell",
"password" : "1234",
"username" : "Omar",
"id" : 84561,
"authority" : "Accountant"
}, {
"name" : "Tom",
"password" : "1234",
"username" : "Ahmed",
"id" : 84562,
"authority" : "Accountant"
}]
"""
// Decode the array, bucket users by authority, then turn each
// (authority, [User]) pair into a Section via Section.init.
// NOTE(review): Dictionary ordering is unspecified — sort the result
// if a stable section order is required.
do {
let users = try JSONDecoder().decode([User].self, from: Data(jsonString.utf8))
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
print(sections)
} catch {
print(error)
}
// One table section per authority group.
func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
// Rows in a section = users in that group.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sections[section].users.count
}
// Dequeue a cell and show the user's id and name.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "usersTVC", for: indexPath)
let user = sections[indexPath.section].users[indexPath.row]
cell.textLabel?.text = "ID: \(user.id) - \(user.name)"
return cell
}
AF.request(url, method: .post, parameters: data, headers: headers).responseDecodable(of: [User].self, decoder: JSONDecoder()) { response in
switch response.result {
case .success(let users):
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
case .failure(let error): print(error)
-----------------------
// One table-view section: a display name plus the users it contains.
// Initialized directly from a Dictionary (key, value) element tuple.
struct Section {
let name : String
let users : [User]
}
// Decodable model matching one user object in the JSON payload.
struct User : Decodable {
let name, password, username, authority : String
let id : Int
}
// Data source backing the table view; rebuilt after decoding.
var sections = [Section]()
// Sample payload; `authority` is the grouping key used below.
let jsonString = """
[{
"name" : "Oliver",
"password" : "1234",
"username" : "Ramy",
"id" : 84560,
"authority" : "Manager"
}, {
"name" : "Maxwell",
"password" : "1234",
"username" : "Omar",
"id" : 84561,
"authority" : "Accountant"
}, {
"name" : "Tom",
"password" : "1234",
"username" : "Ahmed",
"id" : 84562,
"authority" : "Accountant"
}]
"""
// Decode the array, bucket users by authority, then turn each
// (authority, [User]) pair into a Section via Section.init.
// NOTE(review): Dictionary ordering is unspecified — sort the result
// if a stable section order is required.
do {
let users = try JSONDecoder().decode([User].self, from: Data(jsonString.utf8))
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
print(sections)
} catch {
print(error)
}
// One table section per authority group.
func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
// Rows in a section = users in that group.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sections[section].users.count
}
// Dequeue a cell and show the user's id and name.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "usersTVC", for: indexPath)
let user = sections[indexPath.section].users[indexPath.row]
cell.textLabel?.text = "ID: \(user.id) - \(user.name)"
return cell
}
AF.request(url, method: .post, parameters: data, headers: headers).responseDecodable(of: [User].self, decoder: JSONDecoder()) { response in
switch response.result {
case .success(let users):
let grouped = Dictionary(grouping: users, by: \.authority)
sections = grouped.map(Section.init)
case .failure(let error): print(error)
how to fix template does not exist in Django?
# View that renders the blog landing page. Django resolves
# 'Blog/index.html' against every entry in TEMPLATES['DIRS'] and each
# installed app's templates/ directory; TemplateDoesNotExist means none
# of those locations contains Blog/index.html.
def html(request):
return render(request, 'Blog/index.html')
-----------------------
TEMPLATES = [
{
...
'DIRS': [os.path.join(BASE_DIR,'templates')],#add this line
...
]
return render(request,"template_folder/template_name.html")
-----------------------
TEMPLATES = [
{
...
'DIRS': [os.path.join(BASE_DIR,'templates')],#add this line
...
]
return render(request,"template_folder/template_name.html")
Collect similar terms in sympy
In [2]: e, a, m, n = symbols('e, a, m, n')
In [3]: sol = solveset(Eq((e-m)/(e+2*m), n*(a-m)/(a+2*m)), m)
In [4]: s1, s2 = sol.args[0]
In [5]: s1
Out[5]:
_____________________________________________________________________________
╱ 2 2 2 2 2 2 2 2 2
2⋅a⋅n + a - e⋅n - 2⋅e ╲╱ 4⋅a ⋅n + 4⋅a ⋅n + a + 4⋅a⋅e⋅n - 26⋅a⋅e⋅n + 4⋅a⋅e + e ⋅n + 4⋅e ⋅n + 4⋅e
───────────────────── - ────────────────────────────────────────────────────────────────────────────────
4⋅(n - 1) 4⋅(n - 1)
In [6]: s1.collect(e, lambda c: c.factor() if c.is_polynomial() else c)
Out[6]:
_______________________________________________________
╱ 2 2 ⎛ 2 ⎞ 2 2
a⋅(2⋅n + 1) + e⋅(-n - 2) ╲╱ a ⋅(2⋅n + 1) + 2⋅a⋅e⋅⎝2⋅n - 13⋅n + 2⎠ + e ⋅(n + 2)
──────────────────────── - ──────────────────────────────────────────────────────────
4⋅(n - 1) 4⋅(n - 1)
In [7]: s2.collect(e, lambda c: c.factor() if c.is_polynomial() else c)
Out[7]:
_______________________________________________________
╱ 2 2 ⎛ 2 ⎞ 2 2
a⋅(2⋅n + 1) + e⋅(-n - 2) ╲╱ a ⋅(2⋅n + 1) + 2⋅a⋅e⋅⎝2⋅n - 13⋅n + 2⎠ + e ⋅(n + 2)
──────────────────────── + ──────────────────────────────────────────────────────────
4⋅(n - 1) 4⋅(n - 1)
D3.js style and class override not work as my expectation
// Add the bold-header class to each cell (<td>) of the first row.
d3.select("table").select("tr").selectAll("td").classed("bold-header", true);
// .style() sets INLINE styles, which override class rules for the
// same property on the data rows.
d3.select("table").selectAll("tr:not(:first-child)").style("background-color", "lightblue").style("width", "100px");
// NOTE(review): this classes the first <tr> element itself, not its
// <td> children — a different target than the first statement above.
d3.select("table").select("tr").classed("bold-header", true);
.bold-header{
background-color:navy;
color:white;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/5.7.0/d3.min.js"></script>
<table border="1">
<tr>
<td>ID</td>
<td>Name</td>
</tr>
<tr>
<td>001</td>
<td>John</td>
</tr>
<tr>
<td>002</td>
<td>Alex</td>
</tr>
<tr>
<td>003</td>
<td>Maxwell</td>
</tr>
</table>
-----------------------
// Add the bold-header class to each cell (<td>) of the first row.
d3.select("table").select("tr").selectAll("td").classed("bold-header", true);
// .style() sets INLINE styles, which override class rules for the
// same property on the data rows.
d3.select("table").selectAll("tr:not(:first-child)").style("background-color", "lightblue").style("width", "100px");
// NOTE(review): this classes the first <tr> element itself, not its
// <td> children — a different target than the first statement above.
d3.select("table").select("tr").classed("bold-header", true);
.bold-header{
background-color:navy;
color:white;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/5.7.0/d3.min.js"></script>
<table border="1">
<tr>
<td>ID</td>
<td>Name</td>
</tr>
<tr>
<td>001</td>
<td>John</td>
</tr>
<tr>
<td>002</td>
<td>Alex</td>
</tr>
<tr>
<td>003</td>
<td>Maxwell</td>
</tr>
</table>
-----------------------
// Add the bold-header class to each cell (<td>) of the first row.
d3.select("table").select("tr").selectAll("td").classed("bold-header", true);
// .style() sets INLINE styles, which override class rules for the
// same property on the data rows.
d3.select("table").selectAll("tr:not(:first-child)").style("background-color", "lightblue").style("width", "100px");
// NOTE(review): this classes the first <tr> element itself, not its
// <td> children — a different target than the first statement above.
d3.select("table").select("tr").classed("bold-header", true);
.bold-header{
background-color:navy;
color:white;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/5.7.0/d3.min.js"></script>
<table border="1">
<tr>
<td>ID</td>
<td>Name</td>
</tr>
<tr>
<td>001</td>
<td>John</td>
</tr>
<tr>
<td>002</td>
<td>Alex</td>
</tr>
<tr>
<td>003</td>
<td>Maxwell</td>
</tr>
</table>
-----------------------
// Add the bold-header class to each cell (<td>) of the first row.
d3.select("table").select("tr").selectAll("td").classed("bold-header", true);
// .style() sets INLINE styles, which override class rules for the
// same property on the data rows.
d3.select("table").selectAll("tr:not(:first-child)").style("background-color", "lightblue").style("width", "100px");
// NOTE(review): this classes the first <tr> element itself, not its
// <td> children — a different target than the first statement above.
d3.select("table").select("tr").classed("bold-header", true);
.bold-header{
background-color:navy;
color:white;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/5.7.0/d3.min.js"></script>
<table border="1">
<tr>
<td>ID</td>
<td>Name</td>
</tr>
<tr>
<td>001</td>
<td>John</td>
</tr>
<tr>
<td>002</td>
<td>Alex</td>
</tr>
<tr>
<td>003</td>
<td>Maxwell</td>
</tr>
</table>
EASY: How do I separate elements grabbed by class name using Selenium Webdriver?
# All <a> elements NESTED inside elements with class "sceneColActors" —
# one list entry per link, so each actor comes back separately.
driver.find_elements_by_css_selector('.sceneColActors a')
# The "sceneColActors" container elements themselves (links not split).
driver.find_elements_by_class_name('sceneColActors')
-----------------------
# All <a> elements NESTED inside elements with class "sceneColActors" —
# one list entry per link, so each actor comes back separately.
driver.find_elements_by_css_selector('.sceneColActors a')
# The "sceneColActors" container elements themselves (links not split).
driver.find_elements_by_class_name('sceneColActors')
QUESTION
Could not find any factory for identifier 'avro-confluent' that implements 'org.apache.flink.table.factories.DeserializationFormatFactory'
Asked 2022-Feb-27 at 19:32 — I have a Flink job that runs well locally but fails when I try to `flink run` the job on a cluster. The error happens when trying to load data from Kafka via 'connector' = 'kafka'. I am using the Flink Table API and the confluent-avro format for reading data from Kafka.
So basically I created a table which reads data from a Kafka topic:
// Flink SQL DDL for the Kafka SOURCE table: the record key maps to a
// raw BYTES column and the value is decoded with the 'avro-confluent'
// format against the configured schema registry.
// NOTE(review): the column list is abridged here — the trailing comma
// after `id STRING,` would not parse as shown; confirm the full DDL.
val inputTableSQL =
s"""CREATE TABLE input_table (
| -- key of the topic
| key BYTES NOT NULL,
|
| -- a few columns mapped to the Avro fields of the Kafka value
| id STRING,
|
|) WITH (
|
| 'connector' = 'kafka',
| 'topic' = '${KafkaConfiguration.InputTopicName}',
| 'scan.startup.mode' = 'latest-offset',
|
| -- UTF-8 string as Kafka keys, using the 'key' table column
| 'key.format' = 'raw',
| 'key.fields' = 'key',
|
| 'value.format' = 'avro-confluent',
| 'value.avro-confluent.schema-registry.url' = '${KafkaConfiguration.KafkaConsumerSchemaRegistryUrl}',
| 'value.fields-include' = 'EXCEPT_KEY'
|)
|""".stripMargin
// Execute the DDL; the returned TableResult only signals DDL success.
val inputTable = tableEnv.executeSql(inputTableSQL)
and then I created another table, which I will use as the output table:
// Flink SQL DDL for the Kafka SINK table. The value-format properties
// are injected via $outputFormatSettings so the job can switch output
// formats without editing this DDL.
val outputTableSQL =
s"""CREATE TABLE custom_avro_output_table (
| -- key of the topic
| key BYTES NOT NULL,
|
| -- a few columns mapped to the Avro fields of the Kafka value
| ID STRING
|) WITH (
|
| 'connector' = 'kafka',
| 'topic' = '${KafkaConfiguration.OutputTopicName}',
| 'properties.bootstrap.servers' = '${KafkaConfiguration.KafkaProducerBootstrapServers}',
|
| -- UTF-8 string as Kafka keys, using the 'key' table column
| 'key.format' = 'raw',
| 'key.fields' = 'key',
|
| $outputFormatSettings
| 'value.fields-include' = 'EXCEPT_KEY'
|)
|""".stripMargin
// Execute the sink DDL.
val outputTableCreationResult = tableEnv.executeSql(outputTableSQL)
// Continuous INSERT: filter source rows on userAgent and write
// (key, id) to the sink topic. executeSql submits the streaming job.
// NOTE(review): `userAgent` is not declared in the visible input_table
// DDL — presumably among its elided columns; verify against full code.
val customInsertSQL =
"""INSERT INTO custom_avro_output_table
|SELECT key, id
| FROM input_table
| WHERE userAgent LIKE '%ost%'
|""".stripMargin
val customInsertResult = tableEnv.executeSql(customInsertSQL)
When I run this on my local machine, everything works fine, but when I run it on the cluster, it crashes.
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_282]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_282]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_282]
at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.12-1.13.1.jar:1.13.1]
... 13 more
Caused by: org.apache.flink.table.api.ValidationException: Could not find any factory for identifier 'avro-confluent' that implements 'org.apache.flink.table.factories.DeserializationFormatFactory' in the classpath.
Available factory identifiers are:
canal-json
csv
debezium-json
json
maxwell-json
raw
at org.apache.flink.table.factories.FactoryUtil.discoverFactory(FactoryUtil.java:319) ~[flink-table_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.table.factories.FactoryUtil$TableFactoryHelper.discoverOptionalFormatFactory(FactoryUtil.java:751) ~[flink-table_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.table.factories.FactoryUtil$TableFactoryHelper.discoverOptionalDecodingFormat(FactoryUtil.java:649) ~[flink-table_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.table.factories.FactoryUtil$TableFactoryHelper.discoverDecodingFormat(FactoryUtil.java:633) ~[flink-table_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicTableFactory.lambda$getValueDecodingFormat$2(KafkaDynamicTableFactory.java:279) ~[?:?]
at java.util.Optional.orElseGet(Optional.java:267) ~[?:1.8.0_282]
at org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicTableFactory.getValueDecodingFormat(KafkaDynamicTableFactory.java:277) ~[?:?]
at org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicTableFactory.createDynamicTableSource(KafkaDynamicTableFactory.java:142) ~[?:?]
at org.apache.flink.table.factories.FactoryUtil.createTableSource(FactoryUtil.java:134) ~[flink-table_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.table.planner.plan.schema.CatalogSourceTable.createDynamicTableSource(CatalogSourceTable.java:116) ~[flink-table-blink_2.12-1.13.1.jar:1.13.1]
at org.apache.flink.table.planner.plan.schema.CatalogSourceTable.toRel(CatalogSourceTable.java:82) ~[flink-table-blink_2.12-1.13.1.jar:1.13.1]
at org.apache.calcite.sql2rel.SqlToRelConverter.toRel(SqlToRelConverter.java:3585) ~[flink-table_2.12-1.13.1.jar:1.13.1]
following is my build.sbt:
// Flink artifact version shared by every Flink dependency below.
val flinkVersion = "1.13.1"
// %% appends the Scala binary suffix (e.g. _2.12); plain % is for
// Java-only artifacts. `Provided` entries are supplied by the Flink
// runtime on the cluster and are excluded from the assembly jar;
// unscoped entries (kafka connector, avro-confluent registry, json)
// must be shaded into the fat jar.
val flinkDependencies = Seq(
"org.apache.flink" %% "flink-scala" % flinkVersion % Provided,
"org.apache.flink" %% "flink-streaming-scala" % flinkVersion % Provided,
"org.apache.flink" %% "flink-connector-kafka" % flinkVersion,
"org.apache.flink" %% "flink-clients" % flinkVersion % Provided,
"org.apache.flink" %% "flink-table-api-scala-bridge" % flinkVersion % Provided,
"org.apache.flink" %% "flink-table-planner-blink" % flinkVersion % Provided,
"org.apache.flink" % "flink-table-common" % flinkVersion % Provided,
"org.apache.flink" % "flink-avro-confluent-registry" % flinkVersion,
"org.apache.flink" % "flink-json" % flinkVersion,
"com.webtrekk" % "wd.generated" % "2.2.3",
"com.webtrekk" % "wd.generated.public" % "2.2.0",
"ch.qos.logback" % "logback-classic" % "1.2.3"
)
A similar issue has been posted in "Flink 1.12 Could not find any factory for identifier 'kafka' that implements 'org.apache.flink.table.factories.DynamicTableFactory' in the classpath", but the solution of adding `Provided` does not work in my case.
ANSWER
Answered 2021-Oct-26 at 17:47 — I was able to fix this problem using the following approach:
In my build.sbt, there was the following mergeStrategy:
// Original sbt-assembly merge strategy (BEFORE the fix): the catch-all
// `case _ => MergeStrategy.first` keeps only one copy of each
// META-INF/services/* registration file, silently dropping the other
// jars' ServiceLoader entries (hence the missing 'avro-confluent'
// DeserializationFormatFactory at runtime).
lazy val mergeStrategy = Seq(
assembly / assemblyMergeStrategy := {
case "application.conf" => MergeStrategy.concat
case "reference.conf" => MergeStrategy.concat
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
case _ => MergeStrategy.first
}
)
I appended the following chunk to it, which resolved my exception:
// The fix: place these cases ABOVE the `case _` catch-all so that
// ServiceLoader registration files are concatenated and every jar's
// Flink factory registrations remain discoverable.
case "META-INF/services/org.apache.flink.table.factories.Factory" => MergeStrategy.concat
case "META-INF/services/org.apache.flink.table.factories.TableFactory" => MergeStrategy.concat
Community Discussions, Code Snippets contain sources that include Stack Exchange Network
No vulnerabilities reported
Save this library and start creating your kit
Save this library and start creating your kit