Find the Smallest Common Multiple

Tell us what’s happening:
Can anyone please help me figure out where the bug is and why this doesn’t work?

Your code so far


function smallestCommons(arr) {

  /* let output = []; */

  function arr1(numOne) {
    let increment = numOne;
    let newOne = []; // should be console

    for (let i = 1; i < 10; i++) {
      newOne.push(numOne + increment);
      numOne += increment;
    }
    return newOne;
  }

  /* function arr2(numTwo) {
    let newIncrement = numTwo;
    let newArr = [numTwo];
    for (let j = 1; j < 10; j++) {
      newArr.push(numTwo + newIncrement);
      numTwo += newIncrement;
    }
    return newArr;
  } */

  let array1 = arr1(arr[0]);
  let array2 = arr1(arr[1]);

  function common(one, two) {
    let result = one.filter(value => two.indexOf(value) !== -1);
    return result;
  }

  let newArray = common(array1, array2);
  let finalResult = Math.min.apply(null, newArray);
  return finalResult;
}

smallestCommons([1, 5]);

Your browser information:

User Agent is: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36.

Link to the challenge:
https://learn.freecodecamp.org/javascript-algorithms-and-data-structures/intermediate-algorithm-scripting/smallest-common-multiple/

I can’t debug the code as I’m on my phone, but it doesn’t seem to have any syntax issues.

But if your input is [1, 5], you need to find the smallest common multiple not just of 1 and 5, but also of 2, 3, and 4, i.e. of every number in the range.
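For example, a helper along these lines (just a sketch; the name rangeBetween is mine, not part of the challenge) would collect every number that has to divide the answer:

function rangeBetween(a, b) {
  // Order the endpoints so [5, 1] behaves the same as [1, 5]
  const start = Math.min(a, b);
  const end = Math.max(a, b);
  const range = [];
  for (let i = start; i <= end; i++) {
    range.push(i);
  }
  return range;
}

rangeBetween(1, 5); // [1, 2, 3, 4, 5]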

Please also use variable names that let someone else understand what each variable or function is supposed to do.
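For instance, renaming along these lines (hypothetical names, just to show the idea) makes the intent obvious at a glance:

// arr1 -> multiplesOf: says what the function produces
// newOne -> multiples: says what the array holds
function multiplesOf(num) {
  const multiples = [];
  for (let i = 1; i <= 10; i++) {
    multiples.push(num * i); // 1×num through 10×num
  }
  return multiples;
}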

As for the logic flaw: your code starts from the assumption that the smallest common multiple lies within the first ten multiples of the starting number, which is most often not true.
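One common way around that (a sketch only, not the one required solution; gcd and lcm are helper names I am introducing here) is to build the answer from pairwise least common multiples, using lcm(a, b) = a * b / gcd(a, b), so there is no cap on how large the result can grow:

// Greatest common divisor via the Euclidean algorithm
function gcd(a, b) {
  return b === 0 ? a : gcd(b, a % b);
}

// Least common multiple of two numbers
function lcm(a, b) {
  return (a * b) / gcd(a, b);
}

function smallestCommons(arr) {
  const start = Math.min(...arr);
  const end = Math.max(...arr);
  // Fold lcm over the whole range, so the result is
  // divisible by every number between the endpoints
  let result = start;
  for (let i = start + 1; i <= end; i++) {
    result = lcm(result, i);
  }
  return result;
}

smallestCommons([1, 5]); // 60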